+ echo Logging output to /data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp8_stage4.sh/20241128_234743//log_node19.txt
Logging output to /data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp8_stage4.sh/20241128_234743//log_node19.txt
+ export ASCEND_PROCESS_LOG_PATH=/data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp8_stage4.sh/20241128_234743//ascend/19
+ ASCEND_PROCESS_LOG_PATH=/data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp8_stage4.sh/20241128_234743//ascend/19
+ mkdir -p /data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp8_stage4.sh/20241128_234743//ascend/19
+ DATA_PATH=/local_disk/cognitron_vl//configs/lcvlm_finetune_stage4.yaml
+ TOKENIZER_PATH=/data_4/models/Qwen/Qwen2.5-14B-Instruct/
+ CKPT_LOAD_DIR=/data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp2_stage3.sh/20241127_204213/
+ VIT_CKPT_LOAD_DIR=/
+ CKPT_SAVE_DIR=/data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp8_stage4.sh/20241128_234743//
+ rsync -avh /local_disk/cognitron_vl//configs/lcvlm_finetune_stage4.yaml /data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp8_stage4.sh/20241128_234743/
sending incremental file list
sent 71 bytes received 12 bytes 166.00 bytes/sec
total size is 23.84K speedup is 287.17
+ cd /local_disk/cognitron_vl/
+ rm -fr datasets
+ mkdir -p datasets
+ ln -s /data/data/ datasets/CV
+ ln -s /data/data/LLM datasets/LLM
+ ln -s /data/data/LMM datasets/LMM
+ source /local_disk/cognitron_vl//scripts/set_env_mg_npu.sh
++ source /usr/local/Ascend/driver/bin/setenv.bash
+++ DEP_INFO_FILE=/etc/ascend_install.info
+++ [[ -f /etc/ascend_install.info ]]
+++ .
/etc/ascend_install.info +++ DRV_LIB64_COMMON_LDPATH=/driver/lib64/common +++ DRV_LIB64_DRV_LDPATH=/driver/lib64/driver +++ DRV_LIB64_LDPATH=/driver/lib64 +++ export LD_LIBRARY_PATH=/driver/lib64/common:/driver/lib64/driver:/driver/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64/plugin:/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/lib/x86_64-linux-gnu/hdf5/serial: +++ LD_LIBRARY_PATH=/driver/lib64/common:/driver/lib64/driver:/driver/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64/plugin:/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/lib/x86_64-linux-gnu/hdf5/serial: +++ export PATH=/usr/local/Ascend/ascend-toolkit/latest/bin:/usr/local/Ascend/ascend-toolkit/latest/compiler/ccec_compiler/bin:/usr/local/Ascend/ascend-toolkit/latest/tools/ccec_compiler/bin:/root/miniconda3/envs/py38/bin:/root/miniconda3/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/sbin:/usr/local/bin +++ PATH=/usr/local/Ascend/ascend-toolkit/latest/bin:/usr/local/Ascend/ascend-toolkit/latest/compiler/ccec_compiler/bin:/usr/local/Ascend/ascend-toolkit/latest/tools/ccec_compiler/bin:/root/miniconda3/envs/py38/bin:/root/miniconda3/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/sbin:/usr/local/bin ++ source /usr/local/Ascend/ascend-toolkit/set_env.sh +++ export LD_LIBRARY_PATH=/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/driver/lib64/common:/driver/lib64/driver:/driver/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64/plugin:/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/lib/x86_64-linux-gnu/hdf5/serial: +++ 
LD_LIBRARY_PATH=/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/driver/lib64/common:/driver/lib64/driver:/driver/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64/plugin:/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/lib/x86_64-linux-gnu/hdf5/serial: +++ export ASCEND_TOOLKIT_HOME=/usr/local/Ascend/ascend-toolkit/latest +++ ASCEND_TOOLKIT_HOME=/usr/local/Ascend/ascend-toolkit/latest ++++ arch +++ export LD_LIBRARY_PATH=/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/driver/lib64/common:/driver/lib64/driver:/driver/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64/plugin:/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/lib/x86_64-linux-gnu/hdf5/serial: +++ LD_LIBRARY_PATH=/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/driver/lib64/common:/driver/lib64/driver:/driver/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64/plugin:/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/lib/x86_64-linux-gnu/hdf5/serial: +++ export 
LD_LIBRARY_PATH=/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64/plugin:/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/driver/lib64/common:/driver/lib64/driver:/driver/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64/plugin:/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/lib/x86_64-linux-gnu/hdf5/serial: +++ LD_LIBRARY_PATH=/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64/plugin:/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/driver/lib64/common:/driver/lib64/driver:/driver/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64:/usr/local/Ascend/ascend-toolkit/latest/tools/aml/lib64/plugin:/usr/local/Ascend/ascend-toolkit/latest/lib64:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/opskernel:/usr/local/Ascend/ascend-toolkit/latest/lib64/plugin/nnengine:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe/op_tiling/lib/linux/x86_64:/usr/local/Ascend/driver/lib64:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/local/Ascend/driver/lib64/common:/usr/local/Ascend/driver/lib64/driver:/usr/lib/x86_64-linux-gnu/hdf5/serial: +++ export PYTHONPATH=/usr/local/Ascend/ascend-toolkit/latest/python/site-packages:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe:/usr/local/Ascend/ascend-toolkit/latest/python/site-packages:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe: +++ PYTHONPATH=/usr/local/Ascend/ascend-toolkit/latest/python/site-packages:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe:/usr/local/Ascend/ascend-toolkit/latest/python/site-packages:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe: +++ export PATH=/usr/local/Ascend/ascend-toolkit/latest/bin:/usr/local/Ascend/ascend-toolkit/latest/compiler/ccec_compiler/bin:/usr/local/Ascend/ascend-toolkit/latest/tools/ccec_compiler/bin:/usr/local/Ascend/ascend-toolkit/latest/bin:/usr/local/Ascend/ascend-toolkit/latest/compiler/ccec_compiler/bin:/usr/local/Ascend/ascend-toolkit/latest/tools/ccec_compiler/bin:/root/miniconda3/envs/py38/bin:/root/miniconda3/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/sbin:/usr/local/bin +++ 
PATH=/usr/local/Ascend/ascend-toolkit/latest/bin:/usr/local/Ascend/ascend-toolkit/latest/compiler/ccec_compiler/bin:/usr/local/Ascend/ascend-toolkit/latest/tools/ccec_compiler/bin:/usr/local/Ascend/ascend-toolkit/latest/bin:/usr/local/Ascend/ascend-toolkit/latest/compiler/ccec_compiler/bin:/usr/local/Ascend/ascend-toolkit/latest/tools/ccec_compiler/bin:/root/miniconda3/envs/py38/bin:/root/miniconda3/condabin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/sbin:/usr/local/bin
+++ export ASCEND_AICPU_PATH=/usr/local/Ascend/ascend-toolkit/latest
+++ ASCEND_AICPU_PATH=/usr/local/Ascend/ascend-toolkit/latest
+++ export ASCEND_OPP_PATH=/usr/local/Ascend/ascend-toolkit/latest/opp
+++ ASCEND_OPP_PATH=/usr/local/Ascend/ascend-toolkit/latest/opp
+++ export TOOLCHAIN_HOME=/usr/local/Ascend/ascend-toolkit/latest/toolkit
+++ TOOLCHAIN_HOME=/usr/local/Ascend/ascend-toolkit/latest/toolkit
+++ export ASCEND_HOME_PATH=/usr/local/Ascend/ascend-toolkit/latest
+++ ASCEND_HOME_PATH=/usr/local/Ascend/ascend-toolkit/latest
++ export HCCL_CONNECT_TIMEOUT=7200
++ HCCL_CONNECT_TIMEOUT=7200
++ export HCCL_EXEC_TIMEOUT=7200
++ HCCL_EXEC_TIMEOUT=7200
++ export COMBINED_ENABLE=1
++ COMBINED_ENABLE=1
++ export MULTI_STREAM_MEMORY_REUSE=1
++ MULTI_STREAM_MEMORY_REUSE=1
++ export HCCL_RDMA_TC=160
++ HCCL_RDMA_TC=160
++ export HCCL_RDMA_SL=5
++ HCCL_RDMA_SL=5
++ export HCCL_INTRA_PCIE_ENABLE=0
++ HCCL_INTRA_PCIE_ENABLE=0
++ export HCCL_INTRA_ROCE_ENABLE=1
++ HCCL_INTRA_ROCE_ENABLE=1
++ export HCCL_RDMA_TIMEOUT=20
++ HCCL_RDMA_TIMEOUT=20
++ export INF_NAN_MODE_ENABLE=1
++ INF_NAN_MODE_ENABLE=1
++ export DISTRIBUTED_BACKEND=hccl
++ DISTRIBUTED_BACKEND=hccl
++ export ASCEND_LAUNCH_BLOCKING=0
++ ASCEND_LAUNCH_BLOCKING=0
++ export ASCEND_SLOG_PRINT_TO_STDOUT=0
++ ASCEND_SLOG_PRINT_TO_STDOUT=0
++ export ASCEND_GLOBAL_LOG_LEVEL=3
++ ASCEND_GLOBAL_LOG_LEVEL=3
++ export ASCEND_GLOBAL_EVENT_ENABLE=0
++ ASCEND_GLOBAL_EVENT_ENABLE=0
++ export TASK_QUEUE_ENABLE=1
++ TASK_QUEUE_ENABLE=1
++ export PTCOPY_ENABLE=1
++ PTCOPY_ENABLE=1
++ export COMBINED_ENABLE=1
++ COMBINED_ENABLE=1
++ export DYNAMIC_OP=ADD#MUL
++ DYNAMIC_OP=ADD#MUL
++ export HCCL_WHITELIST_DISABLE=1
++ HCCL_WHITELIST_DISABLE=1
++ export HCCL_CONNECT_TIMEOUT=7200
++ HCCL_CONNECT_TIMEOUT=7200
++ export HCCL_WHITELIST_DISABLE=1
++ HCCL_WHITELIST_DISABLE=1
++ export CUDA_DEVICE_MAX_CONNECTIONS=1
++ CUDA_DEVICE_MAX_CONNECTIONS=1
++ pip3 install --no-index --find-links=/data/software/ -r requirements_npu.txt
Looking in links: /data/software/
Processing data/software/expecttest-0.2.1-py3-none-any.whl (from -r requirements_npu.txt (line 1))
Requirement already satisfied: peft in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from -r requirements_npu.txt (line 2)) (0.7.0)
Processing data/software/XlsxWriter-3.2.0-py3-none-any.whl (from -r requirements_npu.txt (line 3))
Requirement already satisfied: termcolor in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from -r requirements_npu.txt (line 4)) (2.4.0)
Requirement already satisfied: tabulate in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from -r requirements_npu.txt (line 5)) (0.9.0)
Processing data/software/tiktoken-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (from -r requirements_npu.txt (line 6))
Requirement already satisfied: matplotlib in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from -r requirements_npu.txt (line 7)) (3.7.5)
Processing data/software/datasets-3.0.0-py3-none-any.whl (from -r requirements_npu.txt (line 8))
Requirement
already satisfied: einops in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from -r requirements_npu.txt (line 9)) (0.7.0) Processing data/software/pybind11-2.13.6-py3-none-any.whl (from -r requirements_npu.txt (line 10)) Requirement already satisfied: tensorboardX in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from -r requirements_npu.txt (line 11)) (2.6.2.2) Processing data/software/pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (from -r requirements_npu.txt (line 12)) Requirement already satisfied: transformers>=4.40.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from -r requirements_npu.txt (line 13)) (4.40.1) Requirement already satisfied: deepspeed>=0.14.2 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from -r requirements_npu.txt (line 14)) (0.14.5) Processing data/software/accelerate-0.34.2-py3-none-any.whl (from -r requirements_npu.txt (line 15)) Requirement already satisfied: timm in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from -r requirements_npu.txt (line 16)) (0.9.16) Processing data/software/flask-3.0.3-py3-none-any.whl (from -r requirements_npu.txt (line 17)) Processing data/software/Flask_RESTful-0.3.10-py2.py3-none-any.whl (from -r requirements_npu.txt (line 18)) Processing data/software/decord-0.6.0-py3-none-manylinux2010_x86_64.whl (from -r requirements_npu.txt (line 19)) Processing data/software/natsort-8.4.0-py3-none-any.whl (from -r requirements_npu.txt (line 20)) Requirement already satisfied: numpy>=1.17 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft->-r requirements_npu.txt (line 2)) (1.24.4) Requirement already satisfied: packaging>=20.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft->-r requirements_npu.txt (line 2)) (23.2) Requirement already satisfied: psutil in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft->-r requirements_npu.txt (line 2)) (5.9.8) Requirement already satisfied: pyyaml in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft->-r requirements_npu.txt (line 2)) (5.4.1) Requirement already satisfied: torch>=1.13.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft->-r requirements_npu.txt (line 2)) (2.1.0+cpu) Requirement already satisfied: tqdm in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft->-r requirements_npu.txt (line 2)) (4.66.2) Requirement already satisfied: safetensors in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft->-r requirements_npu.txt (line 2)) (0.4.2) Requirement already satisfied: huggingface-hub>=0.17.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft->-r requirements_npu.txt (line 2)) (0.20.3) Requirement already satisfied: regex>=2022.1.18 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from tiktoken->-r requirements_npu.txt (line 6)) (2023.12.25) Requirement already satisfied: requests>=2.26.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from tiktoken->-r requirements_npu.txt (line 6)) (2.31.0) Requirement already satisfied: contourpy>=1.0.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from matplotlib->-r requirements_npu.txt (line 7)) (1.1.1) Requirement already satisfied: cycler>=0.10 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from matplotlib->-r requirements_npu.txt (line 7)) (0.12.1) Requirement already satisfied: fonttools>=4.22.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from matplotlib->-r 
requirements_npu.txt (line 7)) (4.49.0) Requirement already satisfied: kiwisolver>=1.0.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from matplotlib->-r requirements_npu.txt (line 7)) (1.4.5) Requirement already satisfied: pillow>=6.2.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from matplotlib->-r requirements_npu.txt (line 7)) (10.2.0) Requirement already satisfied: pyparsing>=2.3.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from matplotlib->-r requirements_npu.txt (line 7)) (3.1.1) Requirement already satisfied: python-dateutil>=2.7 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from matplotlib->-r requirements_npu.txt (line 7)) (2.8.2) Requirement already satisfied: importlib-resources>=3.2.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from matplotlib->-r requirements_npu.txt (line 7)) (6.1.2) Requirement already satisfied: filelock in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets>=2.21.0->-r requirements_npu.txt (line 8)) (3.13.1) Requirement already satisfied: dill<0.3.9,>=0.3.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets>=2.21.0->-r requirements_npu.txt (line 8)) (0.3.7) Requirement already satisfied: pandas in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets>=2.21.0->-r requirements_npu.txt (line 8)) (2.0.3) Processing data/software/requests-2.32.3-py3-none-any.whl (from tiktoken->-r requirements_npu.txt (line 6)) Processing data/software/tqdm-4.67.1-py3-none-any.whl (from peft->-r requirements_npu.txt (line 2)) Requirement already satisfied: xxhash in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets>=2.21.0->-r requirements_npu.txt (line 8)) (3.4.1) Requirement already satisfied: multiprocess in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets>=2.21.0->-r requirements_npu.txt (line 8)) (0.70.15) Requirement already satisfied: fsspec<=2024.6.1,>=2023.1.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from fsspec[http]<=2024.6.1,>=2023.1.0->datasets>=2.21.0->-r requirements_npu.txt (line 8)) (2023.10.0) Requirement already satisfied: aiohttp in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets>=2.21.0->-r requirements_npu.txt (line 8)) (3.9.3) Processing data/software/huggingface_hub-0.26.2-py3-none-any.whl (from peft->-r requirements_npu.txt (line 2)) Requirement already satisfied: protobuf>=3.20 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from tensorboardX->-r requirements_npu.txt (line 11)) (4.25.3) Requirement already satisfied: tokenizers<0.20,>=0.19 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from transformers>=4.40.1->-r requirements_npu.txt (line 13)) (0.19.1) Requirement already satisfied: hjson in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from deepspeed>=0.14.2->-r requirements_npu.txt (line 14)) (3.1.0) Requirement already satisfied: ninja in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from deepspeed>=0.14.2->-r requirements_npu.txt (line 14)) (1.11.1.1) Requirement already satisfied: nvidia-ml-py in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from deepspeed>=0.14.2->-r requirements_npu.txt (line 14)) (12.560.30) Requirement already satisfied: py-cpuinfo in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from deepspeed>=0.14.2->-r requirements_npu.txt (line 14)) (9.0.0) Requirement already satisfied: pydantic in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from 
deepspeed>=0.14.2->-r requirements_npu.txt (line 14)) (1.10.15) Processing data/software/safetensors-0.4.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (from peft->-r requirements_npu.txt (line 2)) Requirement already satisfied: torchvision in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from timm->-r requirements_npu.txt (line 16)) (0.16.0) Requirement already satisfied: Werkzeug>=3.0.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from flask->-r requirements_npu.txt (line 17)) (3.0.1) Requirement already satisfied: Jinja2>=3.1.2 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from flask->-r requirements_npu.txt (line 17)) (3.1.3) Processing data/software/itsdangerous-2.2.0-py3-none-any.whl (from flask->-r requirements_npu.txt (line 17)) Requirement already satisfied: click>=8.1.3 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from flask->-r requirements_npu.txt (line 17)) (8.1.7) Processing data/software/blinker-1.8.2-py3-none-any.whl (from flask->-r requirements_npu.txt (line 17)) Requirement already satisfied: importlib-metadata>=3.6.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from flask->-r requirements_npu.txt (line 17)) (7.0.1) Processing data/software/aniso8601-9.0.1-py2.py3-none-any.whl (from flask_restful->-r requirements_npu.txt (line 18)) Requirement already satisfied: six>=1.3.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from flask_restful->-r requirements_npu.txt (line 18)) (1.16.0) Requirement already satisfied: pytz in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from flask_restful->-r requirements_npu.txt (line 18)) (2024.1) Requirement already satisfied: aiosignal>=1.1.2 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets>=2.21.0->-r requirements_npu.txt (line 8)) (1.3.1) Requirement already satisfied: attrs>=17.3.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets>=2.21.0->-r requirements_npu.txt (line 8)) (23.2.0) Requirement already satisfied: frozenlist>=1.1.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets>=2.21.0->-r requirements_npu.txt (line 8)) (1.4.1) Requirement already satisfied: multidict<7.0,>=4.5 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets>=2.21.0->-r requirements_npu.txt (line 8)) (6.0.5) Requirement already satisfied: yarl<2.0,>=1.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets>=2.21.0->-r requirements_npu.txt (line 8)) (1.9.4) Requirement already satisfied: async-timeout<5.0,>=4.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets>=2.21.0->-r requirements_npu.txt (line 8)) (4.0.3) Requirement already satisfied: typing-extensions>=3.7.4.3 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from huggingface-hub>=0.17.0->peft->-r requirements_npu.txt (line 2)) (4.10.0) Requirement already satisfied: zipp>=0.5 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from importlib-metadata>=3.6.0->flask->-r requirements_npu.txt (line 17)) (3.17.0) Requirement already satisfied: MarkupSafe>=2.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from Jinja2>=3.1.2->flask->-r requirements_npu.txt (line 17)) (2.1.5) Requirement already satisfied: charset-normalizer<4,>=2 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from requests>=2.26.0->tiktoken->-r requirements_npu.txt (line 6)) (3.3.2) Requirement already satisfied: idna<4,>=2.5 
in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from requests>=2.26.0->tiktoken->-r requirements_npu.txt (line 6)) (3.6)
Requirement already satisfied: urllib3<3,>=1.21.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from requests>=2.26.0->tiktoken->-r requirements_npu.txt (line 6)) (1.26.18)
Requirement already satisfied: certifi>=2017.4.17 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from requests>=2.26.0->tiktoken->-r requirements_npu.txt (line 6)) (2024.2.2)
Requirement already satisfied: sympy in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from torch>=1.13.0->peft->-r requirements_npu.txt (line 2)) (1.4)
Requirement already satisfied: networkx in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from torch>=1.13.0->peft->-r requirements_npu.txt (line 2)) (3.1)
Requirement already satisfied: tzdata>=2022.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from pandas->datasets>=2.21.0->-r requirements_npu.txt (line 8)) (2024.1)
Requirement already satisfied: mpmath>=0.19 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from sympy->torch>=1.13.0->peft->-r requirements_npu.txt (line 2)) (1.3.0)
DEPRECATION: apex 0.1-ascend-20240523 has a non-standard version number. pip 24.1 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of apex or contact the author to suggest that they release a version with a conforming version number. Discussion can be found at https://github.com/pypa/pip/issues/12063
Installing collected packages: aniso8601, xlsxwriter, tqdm, safetensors, requests, pybind11, pyarrow, natsort, itsdangerous, expecttest, decord, blinker, tiktoken, huggingface-hub, flask, flask_restful, accelerate, datasets
  Attempting uninstall: tqdm
    Found existing installation: tqdm 4.66.2
    Uninstalling tqdm-4.66.2:
      Successfully uninstalled tqdm-4.66.2
  Attempting uninstall: safetensors
    Found existing installation: safetensors 0.4.2
    Uninstalling safetensors-0.4.2:
      Successfully uninstalled safetensors-0.4.2
  Attempting uninstall: requests
    Found existing installation: requests 2.31.0
    Uninstalling requests-2.31.0:
      Successfully uninstalled requests-2.31.0
  Attempting uninstall: pyarrow
    Found existing installation: pyarrow 15.0.0
    Uninstalling pyarrow-15.0.0:
      Successfully uninstalled pyarrow-15.0.0
  Attempting uninstall: huggingface-hub
    Found existing installation: huggingface-hub 0.20.3
    Uninstalling huggingface-hub-0.20.3:
      Successfully uninstalled huggingface-hub-0.20.3
  Attempting uninstall: accelerate
    Found existing installation: accelerate 0.25.0
    Uninstalling accelerate-0.25.0:
      Successfully uninstalled accelerate-0.25.0
  Attempting uninstall: datasets
    Found existing installation: datasets 2.16.0
    Uninstalling datasets-2.16.0:
      Successfully uninstalled datasets-2.16.0
ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.
tikit 1.8.2.240926 requires dicttoxml==1.7.4, which is not installed.
tikit 1.8.2.240926 requires docopt==0.6.2, which is not installed.
tikit 1.8.2.240926 requires future==0.18.2, which is not installed.
tikit 1.8.2.240926 requires hdfs==2.6.0, which is not installed.
tikit 1.8.2.240926 requires pure-sasl==0.6.2, which is not installed.
tikit 1.8.2.240926 requires py4j==0.10.7, which is not installed.
tikit 1.8.2.240926 requires PyHive[hive]==0.6.4, which is not installed.
tikit 1.8.2.240926 requires pyjwt>=2.4.0, which is not installed.
tikit 1.8.2.240926 requires requests-kerberos>=0.14.0, which is not installed.
tikit 1.8.2.240926 requires sasl==0.3.1, which is not installed.
tikit 1.8.2.240926 requires thrift==0.15.0, which is not installed.
tikit 1.8.2.240926 requires thrift-sasl>=0.1.0, which is not installed.
tikit 1.8.2.240926 requires certifi==2021.10.8, but you have certifi 2024.2.2 which is incompatible.
tikit 1.8.2.240926 requires cos-python-sdk-v5==1.9.29, but you have cos-python-sdk-v5 1.9.26 which is incompatible.
tikit 1.8.2.240926 requires idna==3.3, but you have idna 3.6 which is incompatible.
tikit 1.8.2.240926 requires prettytable==2.5.0, but you have prettytable 3.11.0 which is incompatible.
tikit 1.8.2.240926 requires urllib3==1.26.7, but you have urllib3 1.26.18 which is incompatible.
tikit 1.8.2.240926 requires wcwidth==0.2.5, but you have wcwidth 0.2.13 which is incompatible.
Successfully installed accelerate-0.34.2 aniso8601-9.0.1 blinker-1.8.2 datasets-3.0.0 decord-0.6.0 expecttest-0.2.1 flask-3.0.3 flask_restful-0.3.10 huggingface-hub-0.26.2 itsdangerous-2.2.0 natsort-8.4.0 pyarrow-17.0.0 pybind11-2.13.6 requests-2.32.3 safetensors-0.4.5 tiktoken-0.7.0 tqdm-4.67.1 xlsxwriter-3.2.0
WARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv
++ return 0
+ MEGATRON_DIR=/local_disk/cognitron_vl//third_party/Megatron-LM_core_r0.6.0/
+ MINDSPEED_DIR=/local_disk/cognitron_vl//third_party/MindSpeed_core_r0.6.0/
+ MODELLINK_DIR=/local_disk/cognitron_vl//third_party/ModelLink/
+ pip3 install --no-index --find-links=/data/software/ -e /local_disk/cognitron_vl//third_party/Megatron-LM_core_r0.6.0/
Looking in links: /data/software/
Obtaining file://local_disk/cognitron_vl/third_party/Megatron-LM_core_r0.6.0
  Installing build dependencies: started
  Installing build dependencies: finished with status 'done'
  Checking if build backend supports build_editable: started
  Checking if build backend supports build_editable: finished with status 'done'
  Getting requirements to build editable: started
  Getting requirements to build editable: finished with status 'done'
  Installing backend dependencies: started
  Installing backend dependencies: finished with status 'done'
  Preparing editable metadata (pyproject.toml): started
  Preparing editable metadata (pyproject.toml): finished with status 'done'
Building wheels for collected packages: megatron_core
  Building editable for megatron_core (pyproject.toml): started
  Building editable for megatron_core (pyproject.toml): finished with status 'done'
  Created wheel for megatron_core: filename=megatron_core-0.6.0-0.editable-cp38-cp38-linux_x86_64.whl size=8791 sha256=e22043250cc517bfde599bded9b433617ef93e516e8ca3b66942c3767ac31d80
  Stored in directory: /tmp/pip-ephem-wheel-cache-h7zi5_l2/wheels/54/9c/d1/d2015aa0c34e791e64d65d19395e5a9a5528f0c63fd519b9ff
Successfully built megatron_core
DEPRECATION: apex 0.1-ascend-20240523 has a non-standard version number. pip 24.1 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of apex or contact the author to suggest that they release a version with a conforming version number.
Discussion can be found at https://github.com/pypa/pip/issues/12063
Installing collected packages: megatron_core
Successfully installed megatron_core-0.6.0
WARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv
+ pip3 install --no-index --find-links=/data/software/ -e /local_disk/cognitron_vl//third_party/MindSpeed_core_r0.6.0/
Looking in links: /data/software/
Obtaining file://local_disk/cognitron_vl/third_party/MindSpeed_core_r0.6.0
  Preparing metadata (setup.py): started
  Preparing metadata (setup.py): finished with status 'done'
WARNING: Error parsing requirements for tokenizers: [Errno 2] No such file or directory: '/root/miniconda3/envs/py38/lib/python3.8/site-packages/tokenizers-0.19.1.dist-info/METADATA'
WARNING: Error parsing requirements for transformers: [Errno 2] No such file or directory: '/root/miniconda3/envs/py38/lib/python3.8/site-packages/transformers-4.40.1.dist-info/METADATA'
DEPRECATION: apex 0.1-ascend-20240523 has a non-standard version number. pip 24.1 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of apex or contact the author to suggest that they release a version with a conforming version number. Discussion can be found at https://github.com/pypa/pip/issues/12063
Installing collected packages: mindspeed
  Running setup.py develop for mindspeed
Successfully installed mindspeed-0.6.0
WARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv
+ pip3 install --no-index --find-links=/data/software/ -e /local_disk/cognitron_vl//third_party/ModelLink/
Looking in links: /data/software/
Obtaining file://local_disk/cognitron_vl/third_party/ModelLink
  Preparing metadata (setup.py): started
  Preparing metadata (setup.py): finished with status 'done'
Requirement already satisfied: numpy in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (1.24.4)
Processing data/software/transformers-4.43.2-py3-none-any.whl (from modellink==0.0.1)
Processing data/software/transformers-stream-generator-0.0.5.tar.gz (from modellink==0.0.1)
  Preparing metadata (setup.py): started
  Preparing metadata (setup.py): finished with status 'done'
Requirement already satisfied: sympy in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (1.4)
Requirement already satisfied: decorator in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (5.1.1)
Requirement already satisfied: scipy in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (1.10.1)
Requirement already satisfied: sentencepiece in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (0.2.0)
Requirement already satisfied: einops in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (0.7.0)
Requirement already satisfied: datasets in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (3.0.0)
Requirement already satisfied: pybind11 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (2.13.6)
Requirement already satisfied: accelerate in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (0.34.2)
Requirement already satisfied: six in
/root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (1.16.0) Requirement already satisfied: protobuf in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (4.25.3) Processing data/software/peft-0.7.1-py3-none-any.whl (from modellink==0.0.1) Requirement already satisfied: tiktoken in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from modellink==0.0.1) (0.7.0) Requirement already satisfied: packaging>=20.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft==0.7.1->modellink==0.0.1) (23.2) Requirement already satisfied: psutil in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft==0.7.1->modellink==0.0.1) (5.9.8) Requirement already satisfied: pyyaml in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft==0.7.1->modellink==0.0.1) (5.4.1) Requirement already satisfied: torch>=1.13.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft==0.7.1->modellink==0.0.1) (2.1.0+cpu) Requirement already satisfied: tqdm in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft==0.7.1->modellink==0.0.1) (4.67.1) Requirement already satisfied: safetensors in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft==0.7.1->modellink==0.0.1) (0.4.5) Requirement already satisfied: huggingface-hub>=0.17.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from peft==0.7.1->modellink==0.0.1) (0.26.2) Requirement already satisfied: filelock in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from transformers==4.43.2->modellink==0.0.1) (3.13.1) Requirement already satisfied: regex!=2019.12.17 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from transformers==4.43.2->modellink==0.0.1) (2023.12.25) Requirement already satisfied: requests in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from transformers==4.43.2->modellink==0.0.1) (2.32.3) Processing data/software/tokenizers-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (from transformers==4.43.2->modellink==0.0.1) Requirement already satisfied: pyarrow>=15.0.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets->modellink==0.0.1) (17.0.0) Requirement already satisfied: dill<0.3.9,>=0.3.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets->modellink==0.0.1) (0.3.7) Requirement already satisfied: pandas in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets->modellink==0.0.1) (2.0.3) Requirement already satisfied: xxhash in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets->modellink==0.0.1) (3.4.1) Requirement already satisfied: multiprocess in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets->modellink==0.0.1) (0.70.15) Requirement already satisfied: fsspec<=2024.6.1,>=2023.1.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from fsspec[http]<=2024.6.1,>=2023.1.0->datasets->modellink==0.0.1) (2023.10.0) Requirement already satisfied: aiohttp in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from datasets->modellink==0.0.1) (3.9.3) Requirement already satisfied: mpmath>=0.19 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from sympy->modellink==0.0.1) (1.3.0) Requirement already satisfied: aiosignal>=1.1.2 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets->modellink==0.0.1) (1.3.1) Requirement already satisfied: attrs>=17.3.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from 
aiohttp->datasets->modellink==0.0.1) (23.2.0) Requirement already satisfied: frozenlist>=1.1.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets->modellink==0.0.1) (1.4.1) Requirement already satisfied: multidict<7.0,>=4.5 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets->modellink==0.0.1) (6.0.5) Requirement already satisfied: yarl<2.0,>=1.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets->modellink==0.0.1) (1.9.4) Requirement already satisfied: async-timeout<5.0,>=4.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from aiohttp->datasets->modellink==0.0.1) (4.0.3) Requirement already satisfied: typing-extensions>=3.7.4.3 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from huggingface-hub>=0.17.0->peft==0.7.1->modellink==0.0.1) (4.10.0) Requirement already satisfied: charset-normalizer<4,>=2 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from requests->transformers==4.43.2->modellink==0.0.1) (3.3.2) Requirement already satisfied: idna<4,>=2.5 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from requests->transformers==4.43.2->modellink==0.0.1) (3.6) Requirement already satisfied: urllib3<3,>=1.21.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from requests->transformers==4.43.2->modellink==0.0.1) (1.26.18) Requirement already satisfied: certifi>=2017.4.17 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from requests->transformers==4.43.2->modellink==0.0.1) (2024.2.2) Requirement already satisfied: networkx in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from torch>=1.13.0->peft==0.7.1->modellink==0.0.1) (3.1) Requirement already satisfied: jinja2 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from torch>=1.13.0->peft==0.7.1->modellink==0.0.1) (3.1.3) Requirement already satisfied: python-dateutil>=2.8.2 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from pandas->datasets->modellink==0.0.1) (2.8.2) Requirement already satisfied: pytz>=2020.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from pandas->datasets->modellink==0.0.1) (2024.1) Requirement already satisfied: tzdata>=2022.1 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from pandas->datasets->modellink==0.0.1) (2024.1) Requirement already satisfied: MarkupSafe>=2.0 in /root/miniconda3/envs/py38/lib/python3.8/site-packages (from jinja2->torch>=1.13.0->peft==0.7.1->modellink==0.0.1) (2.1.5) Building wheels for collected packages: transformers_stream_generator Building wheel for transformers_stream_generator (setup.py): started Building wheel for transformers_stream_generator (setup.py): finished with status 'done' Created wheel for transformers_stream_generator: filename=transformers_stream_generator-0.0.5-py3-none-any.whl size=12425 sha256=8ba850436130d2f5021790b66e81977246abf57ffa083df74963c9acfa63ad6c Stored in directory: /root/.cache/pip/wheels/56/8c/42/5381d9c36bc85f28982f4cf8f98dc44d37a6d6c04897a5cb7c Successfully built transformers_stream_generator DEPRECATION: apex 0.1-ascend-20240523 has a non-standard version number. pip 24.1 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of apex or contact the author to suggest that they release a version with a conforming version number. 
Discussion can be found at https://github.com/pypa/pip/issues/12063
Installing collected packages: tokenizers, transformers, transformers_stream_generator, peft, modellink
  Attempting uninstall: tokenizers
    Found existing installation: tokenizers 0.20.3
    Uninstalling tokenizers-0.20.3:
      Successfully uninstalled tokenizers-0.20.3
  Attempting uninstall: transformers
    Found existing installation: transformers 4.46.3
    Uninstalling transformers-4.46.3:
      Successfully uninstalled transformers-4.46.3
  Attempting uninstall: peft
    Found existing installation: peft 0.7.0
    Uninstalling peft-0.7.0:
      Successfully uninstalled peft-0.7.0
  Running setup.py develop for modellink
Successfully installed modellink-0.0.1 peft-0.7.1 tokenizers-0.19.1 transformers-4.43.2 transformers_stream_generator-0.0.5
WARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv
+ export PYTHONPATH=/local_disk/cognitron_vl//third_party/Megatron-LM_core_r0.6.0//:/usr/local/Ascend/ascend-toolkit/latest/python/site-packages:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe:/usr/local/Ascend/ascend-toolkit/latest/python/site-packages:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe:
+ PYTHONPATH=/local_disk/cognitron_vl//third_party/Megatron-LM_core_r0.6.0//:/usr/local/Ascend/ascend-toolkit/latest/python/site-packages:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe:/usr/local/Ascend/ascend-toolkit/latest/python/site-packages:/usr/local/Ascend/ascend-toolkit/latest/opp/built-in/op_impl/ai_core/tbe:
+ GPUS_PER_NODE=16
+ NNODES=32
+ NODE_RANK=19
+ MASTER_PORT=34567
+ export CUDA_DEVICE_MAX_CONNECTIONS=1
+ CUDA_DEVICE_MAX_CONNECTIONS=1
+ export PYTORCH_NPU_ALLOC_CONF=expandable_segments:True
+ PYTORCH_NPU_ALLOC_CONF=expandable_segments:True
+ VISION_SEQ_LENGTH=1025
+ IMAGE_TOKEN_LENGTH=256
+ IMAGE_SIZE=448
+ VISION_MODEL_TYPE=intern_300m
+ TP=8
+ PP=1
+ CP=8
+ CP_ALGO=megatron_cp_algo
+ CP_MASK=causal
+ DISTRIBUTED_ARGS=' --nproc_per_node 16 --nnodes 32 --node_rank 19 --master_addr train-1198772881325351168-93vlj4s2getc-master-0.train-100034032793.svc.cluster.local --master_port 34567 '
+ GPT_ARGS=' --use-mcore-models --tensor-model-parallel-size 8 --pipeline-model-parallel-size 1 --context-parallel-size 8 --context-parallel-algo megatron_cp_algo --cp-attention-mask-type causal --use-cp-send-recv-overlap --no-create-attention-mask-in-dataloader --sparse-mode 4 --sequence-parallel --recompute-method block --recompute-granularity full --recompute-num-layers 48 --num-layers 48 --hidden-size 5120 --ffn-hidden-size 13824 --num-attention-heads 40 --group-query-attention --num-query-groups 8 --tokenizer-type PretrainedFromHF --tokenizer-name-or-path /data_4/models/Qwen/Qwen2.5-14B-Instruct/ --seq-length 1048576 --max-position-embeddings 1048576 --micro-batch-size 1 --global-batch-size 8 --make-vocab-size-divisible-by 1 --padded-vocab-size 152064 --rotary-base 1000000.0 --lr 5.00e-6 --train-iters 500 --lr-decay-style cosine --untie-embeddings-and-output-weights --disable-bias-linear --attention-dropout 0.0 --init-method-std 0.01 --hidden-dropout 0.0 --position-embedding-type rope --normalization RMSNorm --use-fused-rmsnorm --norm-epsilon 1e-6 --swiglu --use-flash-attn --use-fused-rotary-pos-emb --use-rotary-position-embeddings --use-fused-swiglu --use-mc2 --no-masked-softmax-fusion --attention-softmax-in-fp32 --min-lr 1.00e-7 --weight-decay 0.0 --lr-warmup-fraction 0.03 --clip-grad 1.0 --adam-beta1 0.9 --adam-beta2 0.999 --add-qkv-bias --initial-loss-scale 4096 --no-gradient-accumulation-fusion --use-distributed-optimizer --bf16 --overlap-grad-reduce --finetune --vision-model-freeze --vision-model-type intern_300m --vision-downsample-ratio 0.5 --vision-projector-type mlp --vision-projector-pre-norm --vision-process-type dynamic --vision-normalize-type imagenet --vision-seq-length 1025 --image-token-length 256 --image-size 448 --prompt-format qwen2 --is-instruction-dataset --max-num-image 4096 --max-fps 1 --add-class-token --min-patch-grid 1 --max-patch-grid 12 --logit-mask --cross-dataset-joint '
+ DATA_ARGS=' --data-path /local_disk/cognitron_vl//configs/lcvlm_finetune_stage4.yaml --split 100,0,0 --data-seq-length 1048576 --num-workers 8 '
+ CKPT_ARGS=' --load /data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp2_stage3.sh/20241127_204213/ --vit-load / --no-load-optim --no-load-rng --seed 42424242 --save /data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp8_stage4.sh/20241128_234743// '
+ OUTPUT_ARGS=' --log-interval 1 --save-interval 20 --eval-interval 20 --eval-iters 0 --log-throughput --distributed-timeout-minutes 120 '
+ torchrun --nproc_per_node 16 --nnodes 32 --node_rank 19 --master_addr train-1198772881325351168-93vlj4s2getc-master-0.train-100034032793.svc.cluster.local --master_port 34567 /local_disk/cognitron_vl//lcvlm_modellink/pretrain_lcvlm.py --use-mcore-models --tensor-model-parallel-size 8 --pipeline-model-parallel-size 1 --context-parallel-size 8 --context-parallel-algo megatron_cp_algo --cp-attention-mask-type causal --use-cp-send-recv-overlap --no-create-attention-mask-in-dataloader --sparse-mode 4 --sequence-parallel --recompute-method block --recompute-granularity full --recompute-num-layers 48 --num-layers 48 --hidden-size 5120 --ffn-hidden-size 13824 --num-attention-heads 40 --group-query-attention --num-query-groups 8 --tokenizer-type PretrainedFromHF --tokenizer-name-or-path /data_4/models/Qwen/Qwen2.5-14B-Instruct/ --seq-length 1048576 --max-position-embeddings 1048576 --micro-batch-size 1 --global-batch-size 8 --make-vocab-size-divisible-by 1 --padded-vocab-size 152064 --rotary-base 1000000.0 --lr 5.00e-6 --train-iters 500 --lr-decay-style cosine --untie-embeddings-and-output-weights --disable-bias-linear --attention-dropout 0.0 --init-method-std 0.01 --hidden-dropout 0.0 --position-embedding-type rope --normalization RMSNorm --use-fused-rmsnorm --norm-epsilon 1e-6 --swiglu --use-flash-attn --use-fused-rotary-pos-emb --use-rotary-position-embeddings --use-fused-swiglu --use-mc2 --no-masked-softmax-fusion --attention-softmax-in-fp32 --min-lr 1.00e-7 --weight-decay 0.0 --lr-warmup-fraction 0.03 --clip-grad 1.0 --adam-beta1 0.9 --adam-beta2 0.999 --add-qkv-bias --initial-loss-scale 4096 --no-gradient-accumulation-fusion --use-distributed-optimizer --bf16 --overlap-grad-reduce --finetune --vision-model-freeze --vision-model-type intern_300m --vision-downsample-ratio 0.5 --vision-projector-type mlp --vision-projector-pre-norm --vision-process-type dynamic --vision-normalize-type imagenet --vision-seq-length 1025 --image-token-length 256 --image-size 448 --prompt-format qwen2 --is-instruction-dataset --max-num-image 4096 --max-fps 1 --add-class-token --min-patch-grid 1 --max-patch-grid 12 --logit-mask --cross-dataset-joint --data-path /local_disk/cognitron_vl//configs/lcvlm_finetune_stage4.yaml --split 100,0,0 --data-seq-length 1048576 --num-workers 8 --load /data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp2_stage3.sh/20241127_204213/ --vit-load / --no-load-optim --no-load-rng --seed 42424242 --save /data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp8_stage4.sh/20241128_234743// --log-interval 1 --save-interval 20 --eval-interval 20 --eval-iters 0 --log-throughput --distributed-timeout-minutes 120 --distributed-backend nccl
[2024-11-28 15:50:29,888] torch.distributed.run: [WARNING]
[2024-11-28 15:50:29,888] torch.distributed.run: [WARNING] *****************************************
[2024-11-28 15:50:29,888] torch.distributed.run: [WARNING] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
[2024-11-28 15:50:29,888] torch.distributed.run: [WARNING] *****************************************
Using /root/.cache/torch_extensions/py38_cpu as PyTorch extensions root...
Creating extension directory /root/.cache/torch_extensions/py38_cpu/adaptive_cp...
Emitting ninja build file /root/.cache/torch_extensions/py38_cpu/adaptive_cp/build.ninja...
Building extension module adaptive_cp...
Allowing ninja to set a default number of workers... (overridable by setting the environment variable MAX_JOBS=N)
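For reference, here is a condensed sketch of how the launch above is assembled. This is an editor's sketch, not part of the original trace; the variable names, flags, and paths are copied from the trace, and the long argument lists are elided ("...") rather than reproduced in full.

# Editor's sketch of the launch structure; see the full trace above for the elided flags.
DISTRIBUTED_ARGS="--nproc_per_node 16 --nnodes 32 --node_rank 19 --master_addr train-1198772881325351168-93vlj4s2getc-master-0.train-100034032793.svc.cluster.local --master_port 34567"
GPT_ARGS="--use-mcore-models --tensor-model-parallel-size 8 --pipeline-model-parallel-size 1 --context-parallel-size 8 ..."   # model, parallelism and optimizer flags
DATA_ARGS="--data-path $DATA_PATH --split 100,0,0 --data-seq-length 1048576 --num-workers 8"
CKPT_ARGS="--load $CKPT_LOAD_DIR --vit-load $VIT_CKPT_LOAD_DIR --save $CKPT_SAVE_DIR --no-load-optim --no-load-rng --seed 42424242"
OUTPUT_ARGS="--log-interval 1 --save-interval 20 --eval-interval 20 --eval-iters 0 --log-throughput --distributed-timeout-minutes 120"
torchrun $DISTRIBUTED_ARGS /local_disk/cognitron_vl//lcvlm_modellink/pretrain_lcvlm.py \
    $GPT_ARGS $DATA_ARGS $CKPT_ARGS $OUTPUT_ARGS --distributed-backend nccl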
[1/2] c++ -MMD -MF adaptive_cp.o.d -DTORCH_EXTENSION_NAME=adaptive_cp -DTORCH_API_INCLUDE_EXTENSION_H -DPYBIND11_COMPILER_TYPE=\"_gcc\" -DPYBIND11_STDLIB=\"_libstdcpp\" -DPYBIND11_BUILD_ABI=\"_cxxabi1011\" -I/usr/local/Ascend/ascend-toolkit/latest/include -I/root/miniconda3/envs/py38/lib/python3.8/site-packages/torch_npu/include -I/root/miniconda3/envs/py38/lib/python3.8/site-packages/torch_npu/third_party -I/root/miniconda3/envs/py38/lib/python3.8/site-packages/torch_npu/acl -I/root/miniconda3/envs/py38/lib/python3.8/site-packages/torch_npu/inc -isystem /root/miniconda3/envs/py38/lib/python3.8/site-packages/torch/include -isystem /root/miniconda3/envs/py38/lib/python3.8/site-packages/torch/include/torch/csrc/api/include -isystem /root/miniconda3/envs/py38/lib/python3.8/site-packages/torch/include/TH -isystem /root/miniconda3/envs/py38/lib/python3.8/site-packages/torch/include/THC -isystem /root/miniconda3/envs/py38/include/python3.8 -D_GLIBCXX_USE_CXX11_ABI=0 -fPIC -std=c++17 -fstack-protector-all -Wl,-z,relro,-z,now,-z,noexecstack -fPIC -pie -Wl,--disable-new-dtags,--rpath -s -O2 -c local_disk/cognitron_vl/third_party/MindSpeed_core_r0.6.0/mindspeed/ops/csrc/algorithm/adaptive_cp/adaptive_cp.cpp -o adaptive_cp.o
[2/2] c++ adaptive_cp.o -shared -L/usr/local/Ascend/ascend-toolkit/latest/lib64 -lascendcl -L/root/miniconda3/envs/py38/lib/python3.8/site-packages/torch_npu/lib -ltorch_npu -L/root/miniconda3/envs/py38/lib/python3.8/site-packages/torch/lib -lc10 -ltorch_cpu -ltorch -ltorch_python -o adaptive_cp.so
Loading extension module adaptive_cp...
local_disk/cognitron_vl/third_party/MindSpeed_core_r0.6.0/mindspeed/core/tensor_parallel/layers.py:30: UserWarning: failed to generate the npu_matmul_add_fp32
  warnings.warn("failed to generate the npu_matmul_add_fp32")
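If the adaptive_cp JIT build above ever needs to be tuned or redone, the two knobs visible in this log are the ninja worker count (MAX_JOBS, mentioned in the "Allowing ninja..." line) and the per-environment extension cache directory. A minimal sketch, using only paths taken from the log; the MAX_JOBS value is an illustrative assumption:

# Editor's sketch, not part of the original script.
export MAX_JOBS=8                                           # cap ninja parallelism for the C++ extension build
rm -rf /root/.cache/torch_extensions/py38_cpu/adaptive_cp   # drop the cached build to force a clean rebuild on the next launch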
/root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source?
  warn(
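The torchvision warning above is expected in this environment: the pip output earlier shows the CPU wheel of torch (2.1.0+cpu) paired with torch_npu, and libc10_cuda.so ships only with CUDA builds of torch, so torchvision's native image extension cannot load. A quick check, assuming the same py38 conda environment as the job:

# Editor's sketch; the versions reported in the pip log above are 2.1.0+cpu and 0.16.0.
python -c "import torch, torchvision; print(torch.__version__, torchvision.__version__)"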
Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? 
warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( /root/miniconda3/envs/py38/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: 'libc10_cuda.so: cannot open shared object file: No such file or directory'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? warn( > compiling dataset index builder ... make: Entering directory 'local_disk/cognitron_vl/third_party/Megatron-LM_core_r0.6.0/megatron/core/datasets' make: Nothing to be done for 'default'. make: Leaving directory 'local_disk/cognitron_vl/third_party/Megatron-LM_core_r0.6.0/megatron/core/datasets' >>> done with dataset index builder. 
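The torchvision message is benign here: on an NPU-only machine the CUDA image extension (libc10_cuda.so) is simply absent, and as the warning itself notes it can be ignored when torchvision.io is not used. If the noise is unwanted, one option is a warnings filter installed before torchvision is imported; a minimal sketch, not part of the original scripts:

# Minimal sketch: hide the benign torchvision.io warning on an NPU-only environment.
# This only suppresses the message; torchvision.io image ops remain unavailable.
import warnings

warnings.filterwarnings(
    "ignore",
    message="Failed to load image Python extension",
    category=UserWarning,
    module="torchvision.io.image",
)

import torchvision  # noqa: E402  (imported after installing the filter)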
Compilation time: 0.127 seconds
vision_projector_recompute False
(the line above is printed once per rank, 16 times on this node)
vision_model_freeze
=> set param external_feature_model.vit.class_token torch.Size([1, 1, 1024]) requires grad to False.
=> set param external_feature_model.vit.conv1.weight torch.Size([1024, 3, 14, 14]) requires grad to False.
=> set param external_feature_model.vit.conv1.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.position_embeddings.weight torch.Size([1025, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.input_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.input_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
(the same 14-parameter block of ls1, ls2, input_layernorm, self_attention.linear_proj, self_attention.linear_qkv, pre_mlp_layernorm, mlp.linear_fc1 and mlp.linear_fc2 tensors is then logged for decoder.layers.1 through decoder.layers.20 in this portion of the log; every rank prints the full listing, so several copies of it are interleaved in the raw output)
=> set param external_feature_model.vit.decoder.layers.7.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. vision_model_freeze => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.class_token torch.Size([1, 1, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.21.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.conv1.weight torch.Size([1024, 3, 14, 14]) requires grad to False. => set param external_feature_model.vit.conv1.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.7.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.position_embeddings.weight torch.Size([1025, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.ls1 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.1.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls2 torch.Size([1024]) requires grad to False.vision_model_freeze => set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.class_token torch.Size([1, 1, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.ls1 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.conv1.weight torch.Size([1024, 3, 14, 14]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.conv1.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.position_embeddings.weight torch.Size([1025, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.0.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.12.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls2 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. 
=> vision_model_freeze: setting requires_grad to False for the external_feature_model.vit parameters (interleaved multi-rank output de-duplicated below; the same messages repeat for decoder layers 0-23).
=> set param external_feature_model.vit.decoder.layers.<N>.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.input_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.input_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.<N>.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.class_token torch.Size([1, 1, 1024]) requires grad to False.
=> set param external_feature_model.vit.conv1.weight torch.Size([1024, 3, 14, 14]) requires grad to False.
=> set param external_feature_model.vit.conv1.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.position_embeddings.weight torch.Size([1025, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.6.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.21.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.11.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.12.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.13.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.14.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.15.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.15.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls2 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.17.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls2 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.19.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
vision_model_freeze
=> set param external_feature_model.vit.class_token torch.Size([1, 1, 1024]) requires grad to False. => set param external_feature_model.vit.conv1.weight torch.Size([1024, 3, 14, 14]) requires grad to False. => set param external_feature_model.vit.conv1.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.position_embeddings.weight torch.Size([1025, 1024]) requires grad to False.
model GPTVLModel(
  (external_feature_model): MegatronVisionModel(
    (vit): InternViTModel(
      (conv1): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14))
      (position_embeddings): Embedding(1025, 1024)
      (decoder): TransformerBlock(
        (layers): ModuleList(
          (0-23): 24 x InternViTTransformerLayer(
            (input_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
            (self_attention): SelfAttention(
              (core_attention): DotProductAttention(
                (scale_mask_softmax): FusedScaleMaskSoftmax()
                (attention_dropout): Dropout(p=0.0, inplace=False)
              )
              (linear_proj): RowParallelLinear()
              (linear_qkv): ColumnParallelLinear()
            )
            (self_attn_bda): IdentityFuncOp()
            (pre_cross_attn_layernorm): IdentityOp()
            (cross_attention): IdentityOp()
            (cross_attn_bda): IdentityFuncOp()
            (pre_mlp_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
            (mlp): MLP(
              (linear_fc1): ColumnParallelLinear()
              (linear_fc2): RowParallelLinear()
            )
            (mlp_bda): IdentityFuncOp()
          )
        )
      )
    )
    (vision_projection): MultimodalProjector(
      (encoder): MLP(
        (linear_fc1): ColumnParallelLinear()
        (linear_fc2): RowParallelLinear()
      )
    )
    (pre_proj_layernorm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)
  )
  (embedding): LanguageModelEmbedding(
    (word_embeddings): VocabParallelEmbedding()
    (embedding_dropout): Dropout(p=0.0, inplace=False)
  )
  (rotary_pos_emb): RotaryEmbedding()
  (decoder): TransformerBlock(
    (layers): ModuleList(
      (0-47): 48 x TransformerLayer(
        (input_layernorm): RMSNorm()
        (self_attention): SelfAttention(
          (core_attention): DotProductAttention(
            (scale_mask_softmax): FusedScaleMaskSoftmax()
            (attention_dropout): Dropout(p=0.0, inplace=False)
          )
          (linear_proj): RowParallelLinear()
          (linear_qkv): ColumnParallelLinear()
          (q_layernorm): IdentityOp()
          (k_layernorm): IdentityOp()
        )
        (pre_cross_attn_layernorm): IdentityOp()
        (cross_attention): IdentityOp()
        (cross_attn_bda): IdentityFuncOp()
        (pre_mlp_layernorm): RMSNorm()
        (mlp): MLP(
          (linear_fc1): ColumnParallelLinear()
          (linear_fc2): RowParallelLinear()
        )
      )
    )
    (final_layernorm): RMSNorm()
  )
  (output_layer): ColumnParallelLinear()
)
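The "=> set param ... requires grad to False." lines above are the vision_model_freeze step: every parameter under external_feature_model.vit (the conv1 patch embedding, position_embeddings, class_token and all 24 InternViT layers) has its gradient disabled before the optimizer is built, so only the projector and the Qwen2.5 language model are updated in this stage. The snippet below is a minimal sketch of such a freeze-and-log pass; TinyVLM, freeze_module and the printed message are illustrative stand-ins, not the actual ModelLink/Megatron-LM code path.

import torch
from torch import nn

def freeze_module(module: nn.Module, prefix: str) -> None:
    """Disable gradients for every parameter under `module`, logging each one
    in the same style as the training log above (hypothetical helper)."""
    for name, param in module.named_parameters():
        param.requires_grad = False
        print(f"=> set param {prefix}.{name} {param.shape} requires grad to False.")

# Stand-in model: a frozen "vit" plus a trainable projection, mirroring only
# the structure suggested by the log.
class TinyVLM(nn.Module):
    def __init__(self):
        super().__init__()
        self.vit = nn.Sequential(nn.Conv2d(3, 1024, kernel_size=14, stride=14))
        self.vision_projection = nn.Linear(1024, 4096)

model = TinyVLM()
freeze_module(model.vit, prefix="external_feature_model.vit")
# Only parameters that still require grad are handed to the optimizer.
optimizer = torch.optim.AdamW(
    [p for p in model.parameters() if p.requires_grad], lr=1e-5
)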
=> set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls2 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.17.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.ls1 torch.Size([1024]) requires grad to False. 
model GPTVLModel( (external_feature_model): MegatronVisionModel( (vit): InternViTModel( (conv1): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14)) (position_embeddings): Embedding(1025, 1024) (decoder): TransformerBlock( (layers): ModuleList( (0-23): 24 x InternViTTransformerLayer( (input_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) (self_attention): SelfAttention( (core_attention): DotProductAttention( (scale_mask_softmax): FusedScaleMaskSoftmax() (attention_dropout): Dropout(p=0.0, inplace=False) ) (linear_proj): RowParallelLinear() (linear_qkv): ColumnParallelLinear() ) (self_attn_bda): IdentityFuncOp() (pre_cross_attn_layernorm): IdentityOp() (cross_attention): IdentityOp() (cross_attn_bda): IdentityFuncOp() (pre_mlp_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True) (mlp): MLP( (linear_fc1): ColumnParallelLinear() (linear_fc2): RowParallelLinear() ) (mlp_bda): IdentityFuncOp() ) ) ) ) (vision_projection): MultimodalProjector( (encoder): MLP( (linear_fc1): ColumnParallelLinear() (linear_fc2): RowParallelLinear() ) ) (pre_proj_layernorm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True) ) (embedding): LanguageModelEmbedding( (word_embeddings): VocabParallelEmbedding() (embedding_dropout): Dropout(p=0.0, inplace=False) ) (rotary_pos_emb): RotaryEmbedding() (decoder): TransformerBlock( (layers): ModuleList( (0-47): 48 x TransformerLayer( (input_layernorm): RMSNorm() (self_attention): SelfAttention( (core_attention): DotProductAttention( (scale_mask_softmax): FusedScaleMaskSoftmax() (attention_dropout): Dropout(p=0.0, inplace=False) ) (linear_proj): RowParallelLinear() (linear_qkv): ColumnParallelLinear() (q_layernorm): IdentityOp() (k_layernorm): IdentityOp() ) (pre_cross_attn_layernorm): IdentityOp() (cross_attention): IdentityOp() (cross_attn_bda): IdentityFuncOp() (pre_mlp_layernorm): RMSNorm() (mlp): MLP( (linear_fc1): ColumnParallelLinear() (linear_fc2): RowParallelLinear() ) ) ) (final_layernorm): RMSNorm() ) (output_layer): ColumnParallelLinear() ) => set param external_feature_model.vit.decoder.layers.18.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls1 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls1 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.21.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.9.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls2 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls2 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.input_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.input_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. vision_model_freeze => set param external_feature_model.vit.class_token torch.Size([1, 1, 1024]) requires grad to False.
=> set param external_feature_model.vit.conv1.weight torch.Size([1024, 3, 14, 14]) requires grad to False. => set param external_feature_model.vit.conv1.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.position_embeddings.weight torch.Size([1025, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. 
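The vision_model_freeze block above is simply the visible effect of setting requires_grad = False on every parameter under the external_feature_model.vit prefix (class token, conv1 patch embedding, position embeddings, and all 24 InternViT layers). The Python sketch below reproduces that behaviour on a plain torch.nn.Module; the helper name freeze_vision_tower is hypothetical and the actual ModelLink/Megatron code may differ, but the printed format matches the log lines above.

import torch
from torch import nn

def freeze_vision_tower(model: nn.Module, prefix: str = "external_feature_model.vit.") -> None:
    # Freeze every parameter of the vision tower and report it in the
    # same "=> set param ..." format used by the log above.
    for name, param in model.named_parameters():
        if name.startswith(prefix):
            param.requires_grad = False
            print(f"=> set param {name} {param.shape} requires grad to False.")

Called like this on the model printed earlier, the sketch would freeze everything under that prefix while leaving parameters outside it (for example the vision_projection MLP, pre_proj_layernorm, and the language-model decoder) untouched.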
=> set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. vision_model_freeze=> set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.class_token torch.Size([1, 1, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.conv1.weight torch.Size([1024, 3, 14, 14]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.conv1.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.position_embeddings.weight torch.Size([1025, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.1.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.10.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.3.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls1 torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.19.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.6.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.7.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.7.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. 
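These "requires grad to False" messages record the ViT tower being frozen before fine-tuning. As a minimal sketch (not the actual ModelLink/Cognitron-VL code; the module handle and prefix are illustrative), a freeze pass that would emit log lines in exactly this format looks like:

import torch

def freeze_module(module: torch.nn.Module, prefix: str) -> None:
    """Disable gradients for every parameter of `module` and log it."""
    for name, param in module.named_parameters():
        param.requires_grad = False
        # param.shape prints as torch.Size([...]), matching the log lines above.
        print(f"=> set param {prefix}.{name} {param.shape} requires grad to False.")

# Hypothetical usage: freeze only the vision encoder, leave projector and LLM trainable.
# freeze_module(model.external_feature_model.vit, "external_feature_model.vit")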
model GPTVLModel(
  (external_feature_model): MegatronVisionModel(
    (vit): InternViTModel(
      (conv1): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14))
      (position_embeddings): Embedding(1025, 1024)
      (decoder): TransformerBlock(
        (layers): ModuleList(
          (0-23): 24 x InternViTTransformerLayer(
            (input_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
            (self_attention): SelfAttention(
              (core_attention): DotProductAttention(
                (scale_mask_softmax): FusedScaleMaskSoftmax()
                (attention_dropout): Dropout(p=0.0, inplace=False)
              )
              (linear_proj): RowParallelLinear()
              (linear_qkv): ColumnParallelLinear()
            )
            (self_attn_bda): IdentityFuncOp()
            (pre_cross_attn_layernorm): IdentityOp()
            (cross_attention): IdentityOp()
            (cross_attn_bda): IdentityFuncOp()
            (pre_mlp_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
            (mlp): MLP(
              (linear_fc1): ColumnParallelLinear()
              (linear_fc2): RowParallelLinear()
            )
            (mlp_bda): IdentityFuncOp()
          )
        )
      )
    )
    (vision_projection): MultimodalProjector(
      (encoder): MLP(
        (linear_fc1): ColumnParallelLinear()
        (linear_fc2): RowParallelLinear()
      )
    )
    (pre_proj_layernorm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)
  )
  (embedding): LanguageModelEmbedding(
    (word_embeddings): VocabParallelEmbedding()
    (embedding_dropout): Dropout(p=0.0, inplace=False)
  )
  (rotary_pos_emb): RotaryEmbedding()
  (decoder): TransformerBlock(
    (layers): ModuleList(
      (0-47): 48 x TransformerLayer(
        (input_layernorm): RMSNorm()
        (self_attention): SelfAttention(
          (core_attention): DotProductAttention(
            (scale_mask_softmax): FusedScaleMaskSoftmax()
            (attention_dropout): Dropout(p=0.0, inplace=False)
          )
          (linear_proj): RowParallelLinear()
          (linear_qkv): ColumnParallelLinear()
          (q_layernorm): IdentityOp()
          (k_layernorm): IdentityOp()
        )
        (pre_cross_attn_layernorm): IdentityOp()
        (cross_attention): IdentityOp()
        (cross_attn_bda): IdentityFuncOp()
        (pre_mlp_layernorm): RMSNorm()
        (mlp): MLP(
          (linear_fc1): ColumnParallelLinear()
          (linear_fc2): RowParallelLinear()
        )
      )
    )
    (final_layernorm): RMSNorm()
  )
  (output_layer): ColumnParallelLinear()
)
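The per-rank weight shapes in the frozen ViT are consistent with 8-way tensor-parallel sharding of the ColumnParallelLinear/RowParallelLinear layers (the degree is inferred here from the shapes, not printed in this excerpt): the fused QKV projection of a 1024-wide hidden state has 3 x 1024 = 3072 output rows in total and each rank holds 384 of them, while the attention output projection and the MLP projections are split the same way. A small sanity check of that arithmetic:

# Assumed values: hidden size read off the LayerNorm((1024,), ...) entries,
# tensor-parallel degree of 8 inferred from the shard shapes.
hidden_size = 1024
ffn_hidden_size = 4096
tp = 8

qkv_rows_per_rank = 3 * hidden_size // tp   # ColumnParallelLinear shards output rows
proj_cols_per_rank = hidden_size // tp      # RowParallelLinear shards input columns
fc1_rows_per_rank = ffn_hidden_size // tp
fc2_cols_per_rank = ffn_hidden_size // tp

assert (qkv_rows_per_rank, hidden_size) == (384, 1024)   # linear_qkv.weight
assert (hidden_size, proj_cols_per_rank) == (1024, 128)  # linear_proj.weight
assert (fc1_rows_per_rank, hidden_size) == (512, 1024)   # mlp.linear_fc1.weight
assert (hidden_size, fc2_cols_per_rank) == (1024, 512)   # mlp.linear_fc2.weight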
=> set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.9.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.18.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.13.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.14.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.16.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.input_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.input_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.input_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.input_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.input_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.input_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.input_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.input_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.input_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.input_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
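The "=> set param ... requires grad to False." lines above record the vision tower being frozen before stage-4 finetuning: every parameter under external_feature_model.vit has its gradient disabled while the rest of the model stays trainable. Below is a minimal PyTorch sketch of the kind of loop that produces this output; the function name freeze_vision_model and the prefix filter are assumptions inferred from the parameter names in this log, not the exact ModelLink/Cognitron-VL implementation.

import torch

def freeze_vision_model(model: torch.nn.Module, prefix: str = "external_feature_model.vit") -> None:
    # Disable gradients for every parameter of the vision tower and log it,
    # mirroring the "=> set param ... requires grad to False." lines above.
    # (freeze_vision_model and the prefix default are illustrative assumptions.)
    for name, param in model.named_parameters():
        if name.startswith(prefix):
            param.requires_grad = False
            print(f"=> set param {name} {param.shape} requires grad to False.")

Parameters outside the prefix keep requires_grad=True, so the vision projector and the language model remain trainable unless they are frozen elsewhere in the training script.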
model GPTVLModel(
  (external_feature_model): MegatronVisionModel(
    (vit): InternViTModel(
      (conv1): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14))
      (position_embeddings): Embedding(1025, 1024)
      (decoder): TransformerBlock(
        (layers): ModuleList(
          (0-23): 24 x InternViTTransformerLayer(
            (input_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
            (self_attention): SelfAttention(
              (core_attention): DotProductAttention(
                (scale_mask_softmax): FusedScaleMaskSoftmax()
                (attention_dropout): Dropout(p=0.0, inplace=False)
              )
              (linear_proj): RowParallelLinear()
              (linear_qkv): ColumnParallelLinear()
            )
            (self_attn_bda): IdentityFuncOp()
            (pre_cross_attn_layernorm): IdentityOp()
            (cross_attention): IdentityOp()
            (cross_attn_bda): IdentityFuncOp()
            (pre_mlp_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
            (mlp): MLP(
              (linear_fc1): ColumnParallelLinear()
              (linear_fc2): RowParallelLinear()
            )
            (mlp_bda): IdentityFuncOp()
          )
        )
      )
    )
    (vision_projection): MultimodalProjector(
      (encoder): MLP(
        (linear_fc1): ColumnParallelLinear()
        (linear_fc2): RowParallelLinear()
      )
    )
    (pre_proj_layernorm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)
  )
  (embedding): LanguageModelEmbedding(
    (word_embeddings): VocabParallelEmbedding()
    (embedding_dropout): Dropout(p=0.0, inplace=False)
  )
  (rotary_pos_emb): RotaryEmbedding()
  (decoder): TransformerBlock(
    (layers): ModuleList(
      (0-47): 48 x TransformerLayer(
        (input_layernorm): RMSNorm()
        (self_attention): SelfAttention(
          (core_attention): DotProductAttention(
            (scale_mask_softmax): FusedScaleMaskSoftmax()
            (attention_dropout): Dropout(p=0.0, inplace=False)
          )
          (linear_proj): RowParallelLinear()
          (linear_qkv): ColumnParallelLinear()
          (q_layernorm): IdentityOp()
          (k_layernorm): IdentityOp()
        )
        (pre_cross_attn_layernorm): IdentityOp()
        (cross_attention): IdentityOp()
        (cross_attn_bda): IdentityFuncOp()
        (pre_mlp_layernorm): RMSNorm()
        (mlp): MLP(
          (linear_fc1): ColumnParallelLinear()
          (linear_fc2): RowParallelLinear()
        )
      )
    )
    (final_layernorm): RMSNorm()
  )
  (output_layer): ColumnParallelLinear()
)
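The dump above shows the stage-4 model layout: a 24-layer InternViT vision encoder feeding an MLP vision_projection (with a LayerNorm over 4096 features ahead of the projector), and a 48-layer decoder with RMSNorm and rotary position embeddings for the Qwen2.5-14B language model. The per-parameter shapes in the freeze log are tensor-parallel shards rather than full weights: with the tp8 setting taken from the script name, the fused QKV projection of the 1024-wide ViT is column-sharded and the attention output projection is row-sharded across 8 ranks. A small sanity check of that arithmetic follows; tp_size and the 4x MLP expansion are assumptions inferred from the script name and the printed shapes, not values read directly from the log.

# Per-rank shard sizes implied by tensor parallelism (assumed tp_size = 8, from "tp8" in the script name).
vit_hidden = 1024                              # InternViT hidden size (see the LayerNorm/Conv2d widths above)
tp_size = 8

qkv_rows_per_rank = 3 * vit_hidden // tp_size  # ColumnParallelLinear: fused QKV output dim is sharded
proj_cols_per_rank = vit_hidden // tp_size     # RowParallelLinear: attention output input dim is sharded
mlp_rows_per_rank = 4 * vit_hidden // tp_size  # assuming the usual 4x MLP expansion in InternViT

assert qkv_rows_per_rank == 384                # matches linear_qkv.weight torch.Size([384, 1024])
assert proj_cols_per_rank == 128               # matches linear_proj.weight torch.Size([1024, 128])
assert mlp_rows_per_rank == 512                # matches mlp.linear_fc1.weight torch.Size([512, 1024])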
=> set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.8.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.9.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.input_layernorm.bias torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.10.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.10.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False.=> set param external_feature_model.vit.decoder.layers.11.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.11.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.12.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.13.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.14.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.15.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.16.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.17.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.18.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.19.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.20.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.21.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.22.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
model GPTVLModel(
  (external_feature_model): MegatronVisionModel(
    (vit): InternViTModel(
      (conv1): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14))
      (position_embeddings): Embedding(1025, 1024)
      (decoder): TransformerBlock(
        (layers): ModuleList(
          (0-23): 24 x InternViTTransformerLayer(
            (input_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
            (self_attention): SelfAttention(
              (core_attention): DotProductAttention(
                (scale_mask_softmax): FusedScaleMaskSoftmax()
                (attention_dropout): Dropout(p=0.0, inplace=False)
              )
              (linear_proj): RowParallelLinear()
              (linear_qkv): ColumnParallelLinear()
            )
            (self_attn_bda): IdentityFuncOp()
            (pre_cross_attn_layernorm): IdentityOp()
            (cross_attention): IdentityOp()
            (cross_attn_bda): IdentityFuncOp()
            (pre_mlp_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
            (mlp): MLP(
              (linear_fc1): ColumnParallelLinear()
              (linear_fc2): RowParallelLinear()
            )
            (mlp_bda): IdentityFuncOp()
          )
        )
      )
    )
    (vision_projection): MultimodalProjector(
      (encoder): MLP(
        (linear_fc1): ColumnParallelLinear()
        (linear_fc2): RowParallelLinear()
      )
    )
    (pre_proj_layernorm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)
  )
  (embedding): LanguageModelEmbedding(
    (word_embeddings): VocabParallelEmbedding()
    (embedding_dropout): Dropout(p=0.0, inplace=False)
  )
  (rotary_pos_emb): RotaryEmbedding()
  (decoder): TransformerBlock(
    (layers): ModuleList(
      (0-47): 48 x TransformerLayer(
        (input_layernorm): RMSNorm()
        (self_attention): SelfAttention(
          (core_attention): DotProductAttention(
            (scale_mask_softmax): FusedScaleMaskSoftmax()
            (attention_dropout): Dropout(p=0.0, inplace=False)
          )
          (linear_proj): RowParallelLinear()
          (linear_qkv): ColumnParallelLinear()
          (q_layernorm): IdentityOp()
          (k_layernorm): IdentityOp()
        )
        (pre_cross_attn_layernorm): IdentityOp()
        (cross_attention): IdentityOp()
        (cross_attn_bda): IdentityFuncOp()
        (pre_mlp_layernorm): RMSNorm()
        (mlp): MLP(
          (linear_fc1): ColumnParallelLinear()
          (linear_fc2): RowParallelLinear()
        )
      )
    )
    (final_layernorm): RMSNorm()
  )
  (output_layer): ColumnParallelLinear()
)
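The per-parameter messages above are the vision_model_freeze step: every tensor under external_feature_model.vit has requires_grad switched to False before training, while parameters outside that prefix (the vision_projection and the language decoder) are left untouched here. A minimal sketch of that pattern, assuming a plain PyTorch module; the helper name freeze_vision_model and its prefix argument are illustrative, not the training script's actual API:

import torch

def freeze_vision_model(model: torch.nn.Module, prefix: str = "external_feature_model.vit") -> None:
    """Set requires_grad=False for every parameter whose name starts with `prefix`."""
    for name, param in model.named_parameters():
        if name.startswith(prefix):
            param.requires_grad = False
            # Mirrors the log format above; shapes such as linear_qkv.weight
            # torch.Size([384, 1024]) are per-rank shards, consistent with the
            # tensor-parallel size of 8 in this run (3 * 1024 / 8 = 384).
            print(f"=> set param {name} {param.shape} requires grad to False.")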
vision_model_freeze => set param external_feature_model.vit.class_token torch.Size([1, 1, 1024]) requires grad to False. => set param external_feature_model.vit.conv1.weight torch.Size([1024, 3, 14, 14]) requires grad to False. => set param external_feature_model.vit.conv1.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.position_embeddings.weight torch.Size([1025, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.0.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.1.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.2.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. 
=> set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.3.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.4.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.ls1 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.5.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. 
vision_model_freeze
=> set param external_feature_model.vit.class_token torch.Size([1, 1, 1024]) requires grad to False.
=> set param external_feature_model.vit.conv1.weight torch.Size([1024, 3, 14, 14]) requires grad to False.
=> set param external_feature_model.vit.conv1.bias torch.Size([1024]) requires grad to False.
=> set param external_feature_model.vit.position_embeddings.weight torch.Size([1025, 1024]) requires grad to False.
=> set param external_feature_model.vit.decoder.layers.N.<parameter> <shape> requires grad to False, for every decoder layer N = 0..23 and each of the following per-layer parameters:
   ls1                                 torch.Size([1024])
   ls2                                 torch.Size([1024])
   input_layernorm.weight              torch.Size([1024])
   input_layernorm.bias                torch.Size([1024])
   self_attention.linear_proj.weight   torch.Size([1024, 128])
   self_attention.linear_proj.bias     torch.Size([1024])
   self_attention.linear_qkv.weight    torch.Size([384, 1024])
   self_attention.linear_qkv.bias      torch.Size([384])
   pre_mlp_layernorm.weight            torch.Size([1024])
   pre_mlp_layernorm.bias              torch.Size([1024])
   mlp.linear_fc1.weight               torch.Size([512, 1024])
   mlp.linear_fc1.bias                 torch.Size([512])
   mlp.linear_fc2.weight               torch.Size([1024, 512])
   mlp.linear_fc2.bias                 torch.Size([1024])
=> set param external_feature_model.vit.decoder.layers.23.ls2 torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.input_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.weight torch.Size([1024, 128]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_proj.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.weight torch.Size([384, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.self_attention.linear_qkv.bias torch.Size([384]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.weight torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.pre_mlp_layernorm.bias torch.Size([1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.weight torch.Size([512, 1024]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc1.bias torch.Size([512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.weight torch.Size([1024, 512]) requires grad to False. => set param external_feature_model.vit.decoder.layers.23.mlp.linear_fc2.bias torch.Size([1024]) requires grad to False.
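The `=> set param ... requires grad to False.` lines above record the InternViT vision tower being frozen before training starts; the printed shapes (e.g. `torch.Size([384, 1024])` for `linear_qkv.weight`) are per-rank tensor-parallel shards, consistent with the `tp8` setting in the script name (3 x 1024 / 8 = 384 rows per rank). A minimal sketch of this kind of freeze-and-log step, assuming plain PyTorch and a stand-in module rather than the actual ModelLink code (`freeze_and_log` and `vit_stub` are illustrative names):

```python
from torch import nn


def freeze_and_log(module: nn.Module, prefix: str) -> None:
    """Set requires_grad=False on every parameter and log it in the same style as above."""
    for name, param in module.named_parameters():
        param.requires_grad = False
        print(f"=> set param {prefix}.{name} {param.shape} requires grad to False.")


# Stand-in block; in the real run the frozen module is the 24-layer InternViT tower.
vit_stub = nn.Sequential(nn.LayerNorm(1024), nn.Linear(1024, 512))
freeze_and_log(vit_stub, "external_feature_model.vit")
trainable = sum(p.numel() for p in vit_stub.parameters() if p.requires_grad)
print(f"trainable params in frozen block: {trainable}")  # 0
```

Frozen parameters keep their loaded values during fine-tuning and are skipped when the optimizer parameter groups are built further down in the log.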
model GPTVLModel(
  (external_feature_model): MegatronVisionModel(
    (vit): InternViTModel(
      (conv1): Conv2d(3, 1024, kernel_size=(14, 14), stride=(14, 14))
      (position_embeddings): Embedding(1025, 1024)
      (decoder): TransformerBlock(
        (layers): ModuleList(
          (0-23): 24 x InternViTTransformerLayer(
            (input_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
            (self_attention): SelfAttention(
              (core_attention): DotProductAttention(
                (scale_mask_softmax): FusedScaleMaskSoftmax()
                (attention_dropout): Dropout(p=0.0, inplace=False)
              )
              (linear_proj): RowParallelLinear()
              (linear_qkv): ColumnParallelLinear()
            )
            (self_attn_bda): IdentityFuncOp()
            (pre_cross_attn_layernorm): IdentityOp()
            (cross_attention): IdentityOp()
            (cross_attn_bda): IdentityFuncOp()
            (pre_mlp_layernorm): LayerNorm((1024,), eps=1e-06, elementwise_affine=True)
            (mlp): MLP(
              (linear_fc1): ColumnParallelLinear()
              (linear_fc2): RowParallelLinear()
            )
            (mlp_bda): IdentityFuncOp()
          )
        )
      )
    )
    (vision_projection): MultimodalProjector(
      (encoder): MLP(
        (linear_fc1): ColumnParallelLinear()
        (linear_fc2): RowParallelLinear()
      )
    )
    (pre_proj_layernorm): LayerNorm((4096,), eps=1e-05, elementwise_affine=True)
  )
  (embedding): LanguageModelEmbedding(
    (word_embeddings): VocabParallelEmbedding()
    (embedding_dropout): Dropout(p=0.0, inplace=False)
  )
  (rotary_pos_emb): RotaryEmbedding()
  (decoder): TransformerBlock(
    (layers): ModuleList(
      (0-47): 48 x TransformerLayer(
        (input_layernorm): RMSNorm()
        (self_attention): SelfAttention(
          (core_attention): DotProductAttention(
            (scale_mask_softmax): FusedScaleMaskSoftmax()
            (attention_dropout): Dropout(p=0.0, inplace=False)
          )
          (linear_proj): RowParallelLinear()
          (linear_qkv): ColumnParallelLinear()
          (q_layernorm): IdentityOp()
          (k_layernorm): IdentityOp()
        )
        (pre_cross_attn_layernorm): IdentityOp()
        (cross_attention): IdentityOp()
        (cross_attn_bda): IdentityFuncOp()
        (pre_mlp_layernorm): RMSNorm()
        (mlp): MLP(
          (linear_fc1): ColumnParallelLinear()
          (linear_fc2): RowParallelLinear()
        )
      )
    )
    (final_layernorm): RMSNorm()
  )
  (output_layer): ColumnParallelLinear()
)
_get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.embedding.word_embeddings.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc2.weight key (1.0, 1.0, False, False)
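The `_get_param_groups name ... key (...)` entries above and below are emitted while optimizer parameter groups are built. The first two fields of the key appear to be weight-decay and learning-rate multipliers (0.0 for layernorm weights and biases, 1.0 for other weights and embeddings); the trailing booleans are left uninterpreted here. Because several ranks write to the same log file, from this point the entries repeat and interleave. A minimal sketch of grouping parameters by such a key, assuming plain PyTorch (this is illustrative, not the ModelLink `_get_param_groups` implementation; the two extra boolean fields are omitted):

```python
from collections import defaultdict

import torch
from torch import nn


def get_param_groups(model: nn.Module, lr: float, weight_decay: float):
    """Bucket trainable parameters by a (wd_mult, lr_mult) key.

    Biases and norm parameters get wd_mult=0.0, everything else 1.0,
    mirroring the 0.0/1.0 keys printed in the log."""
    buckets = defaultdict(list)
    for name, param in model.named_parameters():
        if not param.requires_grad:
            # frozen parameters (e.g. the ViT tower above) never reach the optimizer
            continue
        no_wd = name.endswith(".bias") or "layernorm" in name or "norm" in name
        key = (0.0 if no_wd else 1.0, 1.0)  # (wd_mult, lr_mult)
        buckets[key].append(param)
        print(f"_get_param_groups name {name} key {key}")
    return [
        {"params": params, "weight_decay": weight_decay * wd_mult, "lr": lr * lr_mult}
        for (wd_mult, lr_mult), params in buckets.items()
    ]


# Hypothetical usage with a tiny stand-in model:
model = nn.ModuleDict({"proj": nn.Linear(16, 16), "input_layernorm": nn.LayerNorm(16)})
optimizer = torch.optim.AdamW(get_param_groups(model, lr=1e-5, weight_decay=0.1))
```

Each bucket then becomes one optimizer parameter group with its own effective weight decay and learning rate.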
_get_param_groups name module.module.decoder.layers.1.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups 
name module.module.decoder.layers.5.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.weight key (0.0, 1.0, False, False) 
_get_param_groups name module.module.decoder.layers.9.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.embedding.word_embeddings.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.12.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.2.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.2.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.3.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups 
name module.module.decoder.layers.3.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.embedding.word_embeddings.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.16.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.17.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.2.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.18.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.3.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.19.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.3.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.20.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.20.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.10.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.5.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.21.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.weight key (1.0, 1.0, False, 
False)_get_param_groups name module.module.decoder.layers.6.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.23.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.23.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.24.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.24.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.25.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.25.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups 
_get_param_groups logs every trainable parameter as "name <param> key <tuple>"; each rank prints the same list, so the lines interleave in the combined output. Per parameter, the keys are:

  module.module.embedding.word_embeddings.weight                                    key (1.0, 1.0, False, False)
  module.module.decoder.layers.{0..47}.input_layernorm.weight                       key (0.0, 1.0, False, False)
  module.module.decoder.layers.{0..47}.self_attention.linear_qkv.weight             key (1.0, 1.0, False, False)
  module.module.decoder.layers.{0..47}.self_attention.linear_qkv.bias               key (0.0, 1.0, False, False)
  module.module.decoder.layers.{0..47}.self_attention.linear_proj.weight            key (1.0, 1.0, False, False)
  module.module.decoder.layers.{0..47}.pre_mlp_layernorm.weight                     key (0.0, 1.0, False, False)
  module.module.decoder.layers.{0..47}.mlp.linear_fc1.weight                        key (1.0, 1.0, False, False)
  module.module.decoder.layers.{0..47}.mlp.linear_fc2.weight                        key (1.0, 1.0, False, False)
  module.module.decoder.final_layernorm.weight                                      key (0.0, 1.0, False, False)
  module.module.output_layer.weight                                                 key (1.0, 1.0, False, False)
  module.module.external_feature_model.pre_proj_layernorm.weight                    key (0.0, 1.0, False, False)
  module.module.external_feature_model.pre_proj_layernorm.bias                      key (0.0, 1.0, False, False)
  module.module.external_feature_model.vision_projection.encoder.linear_fc1.weight  key (1.0, 1.0, False, False)
  module.module.external_feature_model.vision_projection.encoder.linear_fc2.weight  key (1.0, 1.0, False, False)

In short: linear, embedding, and output-layer weights get key (1.0, 1.0, False, False), while biases and layernorm parameters get key (0.0, 1.0, False, False).
False)_get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.34.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.external_feature_model.pre_proj_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.embedding.word_embeddings.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups 
name module.module.decoder.layers.14.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.35.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.35.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups 
name module.module.decoder.layers.0.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.36.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.1.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.37.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.12.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.38.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.38.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.39.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.3.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.45.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.5.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.40.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.46.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.20.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.46.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.21.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.21.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.input_layernorm.weight key (0.0, 1.0, False, False) 
_get_param_groups name module.module.decoder.layers.21.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.final_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.output_layer.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.43.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.embedding.word_embeddings.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.44.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.23.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.24.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.10.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.25.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.20.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.20.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups 
_get_param_groups prints one "name ... key (...)" entry per trainable parameter, interleaved across ranks; the distinct name/key pairs are, for each decoder layer N:
_get_param_groups name module.module.decoder.layers.N.input_layernorm.weight key (0.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.N.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.N.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.N.self_attention.linear_proj.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.N.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.N.mlp.linear_fc1.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.N.mlp.linear_fc2.weight key (1.0, 1.0, False, False)
and, outside the decoder layers:
_get_param_groups name module.module.embedding.word_embeddings.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.decoder.final_layernorm.weight key (0.0, 1.0, False, False)
_get_param_groups name module.module.output_layer.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.external_feature_model.pre_proj_layernorm.weight key (0.0, 1.0, False, False)
_get_param_groups name module.module.external_feature_model.pre_proj_layernorm.bias key (0.0, 1.0, False, False)
_get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc1.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc2.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.30.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.mlp.linear_fc1.weight key (1.0, 1.0, False, 
False)_get_param_groups name module.module.decoder.layers.16.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.embedding.word_embeddings.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.input_layernorm.weight key (0.0, 
1.0, False, False)_get_param_groups name module.module.decoder.layers.46.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.23.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.46.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.23.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.mlp.linear_fc1.weight key 
(1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.24.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.18.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.24.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.47.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.24.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.44.input_layernorm.weight key (0.0, 1.0, False, 
False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.24.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.47.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.34.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.18.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.47.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.25.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.final_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.output_layer.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.2.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.25.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.35.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.2.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.35.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.35.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.36.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.20.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.46.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.27.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.46.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.36.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.27.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.27.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.37.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.27.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.27.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.22.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.37.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.47.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.final_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.5.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.output_layer.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.28.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.38.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.23.input_layernorm.weight key (0.0, 
1.0, False, False) _get_param_groups name module.module.decoder.layers.29.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.29.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.39.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.self_attention.linear_qkv.weight key (1.0, 1.0, 
False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.30.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.24.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.25.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.25.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.self_attention.linear_qkv.bias key (0.0, 1.0, False, 
False) _get_param_groups name module.module.decoder.layers.41.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.25.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.26.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.26.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.26.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_proj.weight key (1.0, 1.0, False, False) 
_get_param_groups name module.module.decoder.layers.32.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.27.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) 
[_get_param_groups output from the other ranks omitted here: the lines were interleaved across ranks and duplicate the same enumeration. Each rank logs, for every decoder layer (layers 10-47 appear in this stretch), the parameters input_layernorm.weight, self_attention.linear_proj.weight, self_attention.linear_qkv.weight, self_attention.linear_qkv.bias, pre_mlp_layernorm.weight, mlp.linear_fc1.weight and mlp.linear_fc2.weight, plus decoder.final_layernorm.weight and output_layer.weight. Linear weights and output_layer.weight are logged with key (1.0, 1.0, False, False); biases and layernorm weights with key (0.0, 1.0, False, False).]
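[Note: the _get_param_groups lines enumerate every trainable parameter together with a key tuple. A minimal sketch of what such a routine does, assuming the tuple is (weight-decay multiplier, LR multiplier, expert-parallel flag, decoupled-LR flag) as in Megatron-style optimizers; the helper name and heuristics below are illustrative only, not the actual ModelLink implementation:]

```python
# Illustrative sketch only: assumes the logged "key (...)" tuple is
# (wd_mult, lr_mult, is_expert_parallel, is_decoupled_lr).
# build_param_groups is a hypothetical helper, not ModelLink code.
from collections import defaultdict

def build_param_groups(named_params, lr, weight_decay):
    """Bucket parameters so biases and layernorm weights are exempt from weight decay."""
    buckets = defaultdict(list)
    for name, param in named_params:
        if not getattr(param, "requires_grad", True):
            continue  # parameters with requires_grad == False get no optimizer group
        # Pattern visible in the log: biases and *layernorm weights -> wd_mult 0.0,
        # linear / embedding weights -> wd_mult 1.0; lr_mult is 1.0 everywhere.
        no_wd = name.endswith(".bias") or "layernorm" in name
        wd_mult, lr_mult = (0.0, 1.0) if no_wd else (1.0, 1.0)
        key = (wd_mult, lr_mult, False, False)
        print(f"_get_param_groups name {name} key {key}")
        buckets[key].append(param)
    return [
        {"params": params, "lr": lr * lr_mult, "weight_decay": weight_decay * wd_mult}
        for (wd_mult, lr_mult, _, _), params in buckets.items()
    ]
```

[Groups built this way could be handed directly to a torch optimizer such as torch.optim.AdamW; this is only a reading aid for the log lines. One rank's parameter listing resumes below at decoder layer 29.]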
_get_param_groups name module.module.decoder.layers.29.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.29.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.34.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups 
name module.module.decoder.layers.38.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) 
_get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.mlp.linear_fc2.weight key (1.0, 1.0, False, False) 
_get_param_groups name module.module.decoder.final_layernorm.weight key (0.0, 1.0, False, False)
_get_param_groups name module.module.output_layer.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc1.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc2.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.external_feature_model.pre_proj_layernorm.weight key (0.0, 1.0, False, False)
_get_param_groups name module.module.external_feature_model.pre_proj_layernorm.bias key (0.0, 1.0, False, False)
_get_param_groups name module.module.embedding.word_embeddings.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.{0..47}.input_layernorm.weight key (0.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.{0..47}.self_attention.linear_proj.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.{0..47}.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.{0..47}.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.{0..47}.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.{0..47}.mlp.linear_fc1.weight key (1.0, 1.0, False, False)
_get_param_groups name module.module.decoder.layers.{0..47}.mlp.linear_fc2.weight key (1.0, 1.0, False, False)
module.module.decoder.layers.8.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.2.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.7.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.29.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.29.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.8.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.29.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.12.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.5.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.5.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups 
name module.module.decoder.layers.31.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.14.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.external_feature_model.pre_proj_layernorm.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.33.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.embedding.word_embeddings.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.15.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.34.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.34.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.13.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.0.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.14.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.11.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.18.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.38.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.38.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.2.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.39.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.2.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.18.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.20.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.14.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.21.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.2.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.3.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.3.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name 
_get_param_groups: per-parameter group keys, printed as "name <parameter> key (…)" once per parameter by every rank. The interleaved, duplicated output from all ranks is collapsed here for readability; every distinct parameter/key pairing seen in this part of the log (decoder layers 3-47, plus the final layernorm and output layer) is listed once:
  module.module.decoder.layers.<n>.self_attention.linear_qkv.weight   key (1.0, 1.0, False, False)
  module.module.decoder.layers.<n>.self_attention.linear_proj.weight  key (1.0, 1.0, False, False)
  module.module.decoder.layers.<n>.mlp.linear_fc1.weight              key (1.0, 1.0, False, False)
  module.module.decoder.layers.<n>.mlp.linear_fc2.weight              key (1.0, 1.0, False, False)
  module.module.decoder.layers.<n>.self_attention.linear_qkv.bias     key (0.0, 1.0, False, False)
  module.module.decoder.layers.<n>.input_layernorm.weight             key (0.0, 1.0, False, False)
  module.module.decoder.layers.<n>.pre_mlp_layernorm.weight           key (0.0, 1.0, False, False)
  module.module.decoder.final_layernorm.weight                        key (0.0, 1.0, False, False)
  module.module.output_layer.weight                                   key (1.0, 1.0, False, False)
module.module.decoder.layers.47.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.final_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.output_layer.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.46.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.input_layernorm.weight key 
(0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.47.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.final_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.output_layer.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) 
_get_param_groups name module.module.decoder.layers.18.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.46.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.46.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.46.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.19.mlp.linear_fc1.weight key (1.0, 1.0, 
False, False) _get_param_groups name module.module.decoder.layers.47.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.19.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.47.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.final_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.output_layer.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.20.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.20.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.21.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.22.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name 
module.module.decoder.layers.23.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.23.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.24.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.25.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.26.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.26.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.vision_projection.encoder.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.external_feature_model.pre_proj_layernorm.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.pre_mlp_layernorm.weight 
key (0.0, 1.0, False, False) _get_param_groups name module.module.embedding.word_embeddings.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.27.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.27.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.0.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.28.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.29.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.29.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.1.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.1.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.29.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.input_layernorm.weight key (0.0, 1.0, False, 
False)_get_param_groups name module.module.decoder.layers.2.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.30.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.30.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.30.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.2.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.30.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.30.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.2.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.30.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.3.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.3.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.31.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.31.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.3.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.self_attention.linear_qkv.weight key (1.0, 1.0, False, 
False)_get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.4.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.32.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.32.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.4.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.5.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.33.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.34.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.34.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.34.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.pre_mlp_layernorm.weight key (0.0, 1.0, False, 
False)_get_param_groups name module.module.decoder.layers.34.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.34.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.6.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.6.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.34.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.35.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.35.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.7.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.35.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.35.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.7.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.8.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.36.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.8.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.36.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.36.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.8.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.37.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.37.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.9.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.37.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.37.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.9.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.37.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.38.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.38.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.38.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.10.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.39.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.39.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.39.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.39.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.11.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.11.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.39.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.12.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.40.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.40.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.12.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.self_attention.linear_qkv.bias key (0.0, 1.0, False, 
False)_get_param_groups name module.module.decoder.layers.41.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.13.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.41.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.41.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.13.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.14.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.42.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.15.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.43.mlp.linear_fc1.weight key (1.0, 1.0, False, 
False)_get_param_groups name module.module.decoder.layers.15.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.15.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.43.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.44.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.44.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.44.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.16.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.44.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.44.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.16.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.input_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_proj.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.self_attention.linear_proj.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.self_attention.linear_qkv.bias key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.self_attention.linear_qkv.bias key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.pre_mlp_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.pre_mlp_layernorm.weight key (0.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.mlp.linear_fc1.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.mlp.linear_fc1.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.17.mlp.linear_fc2.weight key (1.0, 1.0, False, False)_get_param_groups name module.module.decoder.layers.45.mlp.linear_fc2.weight key (1.0, 1.0, False, False) _get_param_groups name module.module.decoder.layers.18.input_layernorm.weight key (0.0, 1.0, False, False)_get_param_groups name 
module.module.decoder.layers.46.input_layernorm.weight key (0.0, 1.0, False, False)
_get_param_groups (interleaved output from multiple ranks; entries for decoder layers 18-47 plus the final layernorm and output layer, every layer reporting the same keys):
  module.module.decoder.layers.<N>.input_layernorm.weight            key (0.0, 1.0, False, False)
  module.module.decoder.layers.<N>.self_attention.linear_proj.weight key (1.0, 1.0, False, False)
  module.module.decoder.layers.<N>.self_attention.linear_qkv.weight  key (1.0, 1.0, False, False)
  module.module.decoder.layers.<N>.self_attention.linear_qkv.bias    key (0.0, 1.0, False, False)
  module.module.decoder.layers.<N>.pre_mlp_layernorm.weight          key (0.0, 1.0, False, False)
  module.module.decoder.layers.<N>.mlp.linear_fc1.weight             key (1.0, 1.0, False, False)
  module.module.decoder.layers.<N>.mlp.linear_fc2.weight             key (1.0, 1.0, False, False)
  module.module.decoder.final_layernorm.weight                       key (0.0, 1.0, False, False)
  module.module.output_layer.weight                                  key (1.0, 1.0, False, False)
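Each key tuple above is the grouping key used to build optimizer parameter groups. The first two fields look like the weight-decay and learning-rate multipliers: biases and layernorm weights get 0.0 (no weight decay) while matrix weights get 1.0; the two trailing booleans are extra grouping flags that are not interpreted here. A minimal sketch of this kind of grouping (an illustration under those assumptions, not the actual ModelLink/Megatron _get_param_groups):

```python
# Minimal sketch of optimizer param grouping (assumption-based illustration, not the
# real ModelLink/Megatron _get_param_groups): names ending in ".bias" or containing
# "layernorm" get wd_mult 0.0 (no weight decay), all other parameters get 1.0.
from collections import defaultdict

def get_param_groups(named_params, lr_mult=1.0):
    groups = defaultdict(list)
    for name, param in named_params:          # e.g. model.named_parameters()
        if not param.requires_grad:
            continue
        no_wd = name.endswith(".bias") or "layernorm" in name.lower()
        key = (0.0 if no_wd else 1.0, lr_mult)
        print("_get_param_groups name", name, "key", key)  # same shape as the log lines above
        groups[key].append(param)
    return [
        {"params": params, "wd_mult": wd_mult, "lr_mult": lr}
        for (wd_mult, lr), params in groups.items()
    ]
```

Each resulting group would typically be handed to the optimizer with its weight decay scaled by wd_mult and its learning rate scaled by lr_mult.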
_load_base_checkpoint iteration 1000 (printed once per local rank)
_load_base_checkpoint release False (printed once per local rank)
_load_base_checkpoint loading per model-parallel rank from /data_2/output/LM/scripts/modellink/qwen25/finetune_qwen25_14b_intern_300m_ptd_tp8pp1cp2_stage3.sh/20241127_204213/iter_0001000/ (each file loaded by two of the 16 local ranks):
  mp_rank_00/model_optim_rng.pt
  mp_rank_01/model_optim_rng.pt
  mp_rank_02/model_optim_rng.pt
  mp_rank_03/model_optim_rng.pt
  mp_rank_04/model_optim_rng.pt
  mp_rank_05/model_optim_rng.pt
  mp_rank_06/model_optim_rng.pt
  mp_rank_07/model_optim_rng.pt
load_checkpoint iteration 0 release False strict True (printed once per local rank)
> rank 316 does not create GPT datasets ...
> rank 306 does not create GPT datasets ...
> rank 308 does not create GPT datasets ...
> rank 304 is creating GPT datasets ...
> rank 312 is creating GPT datasets ...
> rank 318 does not create GPT datasets ...
> rank 310 does not create GPT datasets ...
> rank 311 does not create GPT datasets ...
> rank 305 does not create GPT datasets ...
> rank 317 does not create GPT datasets ...
> rank 314 does not create GPT datasets ...
> rank 307 does not create GPT datasets ...
> rank 315 does not create GPT datasets ...
> rank 319 does not create GPT datasets ...
> rank 309 does not create GPT datasets ...
> rank 313 does not create GPT datasets ...
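The per-rank files above follow the usual Megatron-style checkpoint layout: <load_dir>/iter_<7-digit iteration>/mp_rank_<model-parallel rank>/model_optim_rng.pt. A small sketch of that path convention (checkpoint_path is an illustrative helper, not the loader's actual API; pipeline-parallel size 1 is assumed, matching this run):

```python
import os

# Illustrative helper for the Megatron-style checkpoint layout seen above
# (not the loader's actual code).
def checkpoint_path(load_dir, iteration, tp_rank, pp_rank=0, pp_size=1):
    iter_dir = f"iter_{iteration:07d}"                 # e.g. iter_0001000
    if pp_size == 1:
        rank_dir = f"mp_rank_{tp_rank:02d}"            # e.g. mp_rank_07
    else:
        rank_dir = f"mp_rank_{tp_rank:02d}_{pp_rank:03d}"
    return os.path.join(load_dir, iter_dir, rank_dir, "model_optim_rng.pt")

# checkpoint_path("<CKPT_LOAD_DIR>", 1000, 7)
#   -> <CKPT_LOAD_DIR>/iter_0001000/mp_rank_07/model_optim_rng.pt
```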
target_ratios [(1, 1), (1, 2), (2, 1), (3, 1), (1, 3), (2, 2), (4, 1), (1, 4), (5, 1), (1, 5), (1, 6), (6, 1), (3, 2), (2, 3), (7, 1), (1, 7), (4, 2), (2, 4), (1, 8), (8, 1), (1, 9), (3, 3), (9, 1), (2, 5), (5, 2), (10, 1), (1, 10), (11, 1), (1, 11), (12, 1), (3, 4), (4, 3), (1, 12), (6, 2), (2, 6)]
possible_resolutions [[448, 448], [448, 896], [896, 448], [1344, 448], [448, 1344], [896, 896], [1792, 448], [448, 1792], [2240, 448], [448, 2240], [448, 2688], [2688, 448], [1344, 896], [896, 1344], [3136, 448], [448, 3136], [1792, 896], [896, 1792], [448, 3584], [3584, 448], [448, 4032], [1344, 1344], [4032, 448], [896, 2240], [2240, 896], [4480, 448], [448, 4480], [4928, 448], [448, 4928], [5376, 448], [1344, 1792], [1792, 1344], [448, 5376], [2688, 896], [896, 2688]]
[h264] mmco: unref short failure (ffmpeg h264 decoder warning, emitted repeatedly while decoding video samples, interleaved with the progress lines that follow)
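The target_ratios / possible_resolutions pair above is the tile grid for dynamic high-resolution image preprocessing: every (columns, rows) aspect ratio with at most 12 tiles is enumerated and mapped to pixels by multiplying by a 448-pixel tile size. A sketch that reproduces these lists under those assumptions (1 to 12 tiles of 448x448, InternVL-style dynamic tiling; ordering among ratios with the same tile count may differ from the log):

```python
# Sketch of how the target_ratios / possible_resolutions above can be generated
# (assumptions: 1..12 tiles of 448x448, InternVL-style dynamic tiling).
def build_tiling_grids(min_num=1, max_num=12, image_size=448):
    ratios = {
        (i, j)
        for n in range(min_num, max_num + 1)
        for i in range(1, n + 1)
        for j in range(1, n + 1)
        if min_num <= i * j <= max_num
    }
    target_ratios = sorted(ratios, key=lambda r: r[0] * r[1])   # sort by tile count
    possible_resolutions = [[image_size * i, image_size * j] for i, j in target_ratios]
    return target_ratios, possible_resolutions

# len(build_tiling_grids()[0]) == 35, matching the 35 ratios printed above.
```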
processed_samples 100 unjoint_samples 100 joint_samples 0 [180946, 180031]
processed_samples 100 unjoint_samples 100 joint_samples 0 [236871, 195483]
processed_samples 100 unjoint_samples 100 joint_samples 0 [211587, 211033]
processed_samples 100 unjoint_samples 100 joint_samples 0 [163845, 166826]
processed_samples 100 unjoint_samples 100 joint_samples 0 [184333, 181713]
processed_samples 100 unjoint_samples 100 joint_samples 0 [112494, 110440]
processed_samples 100 unjoint_samples 100 joint_samples 0 [182713, 180386]
processed_samples 100 unjoint_samples 100 joint_samples 0 [120603, 122669]
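The processed_samples lines are periodic progress counters from the dataloader workers: running totals of processed, unjoint and joint samples plus two bracketed per-worker counters whose exact meaning is not stated in the log. A small sketch of a logger that emits this format (log_progress is a hypothetical helper; the bracketed values are simply passed through):

```python
# Sketch of a progress logger matching the format of the lines above. The two bracketed
# values are passed through unchanged; their meaning is left open here.
def log_progress(processed_samples, unjoint_samples, joint_samples, counters):
    print(
        f"processed_samples {processed_samples} "
        f"unjoint_samples {unjoint_samples} "
        f"joint_samples {joint_samples} {list(counters)}"
    )

# log_progress(100, 100, 0, (180946, 180031))
#   -> processed_samples 100 unjoint_samples 100 joint_samples 0 [180946, 180031]
```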
processed_samples 200 unjoint_samples 200 joint_samples 0 [312137, 303389]
processed_samples 200 unjoint_samples 200 joint_samples 0 [357534, 355797]
processed_samples 200 unjoint_samples 200 joint_samples 0 [305755, 304691]
processed_samples 200 unjoint_samples 200 joint_samples 0 [337040, 335362]
processed_samples 200 unjoint_samples 200 joint_samples 0 [279678, 288799]
processed_samples 200 unjoint_samples 200 joint_samples 0 [237582, 239657]
processed_samples 200 unjoint_samples 200 joint_samples 0 [339685, 343934]
processed_samples 200 unjoint_samples 200 joint_samples 0 [369497, 370609]
processed_samples 300 unjoint_samples 300 joint_samples 0 [457890, 463393]
processed_samples 300 unjoint_samples 300 joint_samples 0 [452966, 452889]
processed_samples 300 unjoint_samples 300 joint_samples 0 [508500, 508720]
processed_samples 300 unjoint_samples 300 joint_samples 0 [480114, 479661]
processed_samples 300 unjoint_samples 300 joint_samples 0 [531467, 516657]
processed_samples 300 unjoint_samples 300 joint_samples 0 [504030, 493973]
processed_samples 300 unjoint_samples 300 joint_samples 0 [650322, 539044]
processed_samples 300 unjoint_samples 300 joint_samples 0 [502093, 500026]
processed_samples 400 unjoint_samples 400 joint_samples 0 [658462, 672467]
processed_samples 400 unjoint_samples 400 joint_samples 0 [643297, 641890]
processed_samples 400 unjoint_samples 400 joint_samples 0 [612864, 613053]
processed_samples 400 unjoint_samples 400 joint_samples 0 [677825, 678566]
processed_samples 400 unjoint_samples 400 joint_samples 0 [667162, 658984]
processed_samples 400 unjoint_samples 400 joint_samples 0 [634365, 633226]
processed_samples 400 unjoint_samples 400 joint_samples 0 [672092, 659348]
processed_samples 400 unjoint_samples 400 joint_samples 0 [723897, 734960]
processed_samples 500 unjoint_samples 500 joint_samples 0 [771027, 775107]
processed_samples 500 unjoint_samples 500 joint_samples 0 [811964, 814042]
processed_samples 500 unjoint_samples 500 joint_samples 0 [825460, 825457]
processed_samples 500 unjoint_samples 500 joint_samples 0 [794504, 793557]
processed_samples 500 unjoint_samples 500 joint_samples 0 [783045, 776336]
processed_samples 500 unjoint_samples 500 joint_samples 0 [843524, 859784]
processed_samples 500 unjoint_samples 500 joint_samples 0 [887228, 891147]
processed_samples 500 unjoint_samples 500 joint_samples 0 [883313, 884942]
processed_samples 600 unjoint_samples 600 joint_samples 1 [1046805, 6444]
processed_samples 600 unjoint_samples 600 joint_samples 0 [921699, 933398]
processed_samples 600 unjoint_samples 600 joint_samples 1 [1006678, 124289]
processed_samples 600 unjoint_samples 600 joint_samples 0 [919972, 917626]
processed_samples 600 unjoint_samples 600 joint_samples 0 [1007343, 989853]
processed_samples 600 unjoint_samples 600 joint_samples 0 [978470, 999746]
processed_samples 600 unjoint_samples 600 joint_samples 0 [949230, 950073]
processed_samples 600 unjoint_samples 600 joint_samples 0 [998436, 999809]
processed_samples 700 unjoint_samples 700 joint_samples 1 [1009819, 174104]
processed_samples 700 unjoint_samples 700 joint_samples 1 [1036189, 55859]
processed_samples 700 unjoint_samples 700 joint_samples 1 [1047066, 48898]
processed_samples 700 unjoint_samples 700 joint_samples 1 [303501, 1047419]
processed_samples 700 unjoint_samples 700 joint_samples 1 [1046805, 341418]
processed_samples 700 unjoint_samples 700 joint_samples 1 [1047065, 123641]
processed_samples 700 unjoint_samples 700 joint_samples 1 [313270, 1010707]
processed_samples 700 unjoint_samples 700 joint_samples 1 [1006678, 411151]
processed_samples 800 unjoint_samples 800 joint_samples 1 [662852, 1047419]
processed_samples 800 unjoint_samples 800 joint_samples 1 [1036189, 366771]
processed_samples 800 unjoint_samples 800 joint_samples 1 [1009819, 398610]
processed_samples 800 unjoint_samples 800 joint_samples 1 [1046805, 589495]
processed_samples 800 unjoint_samples 800 joint_samples 1 [732221, 1010707]
processed_samples 800 unjoint_samples 800 joint_samples 1 [1047066, 281795]
processed_samples 800 unjoint_samples 800 joint_samples 1 [1047065, 416590]
processed_samples 800 unjoint_samples 800 joint_samples 1 [1006678, 848716]
processed_samples 900 unjoint_samples 900 joint_samples 2 [1045761, 54306]
processed_samples 900 unjoint_samples 900 joint_samples 1 [1047066, 641616]
processed_samples 900 unjoint_samples 900 joint_samples 1 [940218, 1047419]
processed_samples 900 unjoint_samples 900 joint_samples 1 [1046805, 838941]
processed_samples 900 unjoint_samples 900 joint_samples 1 [1005989, 1010707]
processed_samples 900 unjoint_samples 900 joint_samples 1 [1009819, 775474]
processed_samples 900 unjoint_samples 900 joint_samples 1 [1047065, 708642]
processed_samples 900 unjoint_samples 900 joint_samples 1 [1036189, 674775]
processed_samples 1000 unjoint_samples 1000 joint_samples 2 [59379, 1046619]
processed_samples 1000 unjoint_samples 1000 joint_samples 2 [61395, 1046433]
processed_samples 1000 unjoint_samples 1000 joint_samples 2 [204174, 1048071]
processed_samples 1000 unjoint_samples 1000 joint_samples 2 [970718, 403105]
processed_samples 1000 unjoint_samples 1000 joint_samples 1 [1036189, 966023]
processed_samples 1000 unjoint_samples 1000 joint_samples 2 [1045761, 417720]
processed_samples 1000 unjoint_samples 1000 joint_samples 2 [1047609, 2114]
processed_samples 1000 unjoint_samples 1000 joint_samples 1 [1037093, 1038094]
processed_samples 1100 unjoint_samples 1100 joint_samples 2 [1045761, 714329]
processed_samples 1100 unjoint_samples 1100 joint_samples 2 [403769, 1046433]
processed_samples 1100 unjoint_samples 1100 joint_samples 2 [371496, 1046619]
processed_samples 1100 unjoint_samples 1100 joint_samples 2 [243943, 1035611]
processed_samples 1100 unjoint_samples 1100 joint_samples 2 [283694, 1046918]
processed_samples 1100 unjoint_samples 1100 joint_samples 2 [1047609, 298097]
processed_samples 1100 unjoint_samples 1100 joint_samples 2 [494388, 1048071]
processed_samples 1100 unjoint_samples 1100 joint_samples 2 [970718, 684351]
processed_samples 1200 unjoint_samples 1200 joint_samples 2 [573340, 1035611]
processed_samples 1200 unjoint_samples 1200 joint_samples 2 [982961, 1000598]
processed_samples 1200 unjoint_samples 1200 joint_samples 2 [576815, 1046918]
processed_samples 1200 unjoint_samples 1200 joint_samples 2 [1047609, 629195]
processed_samples 1200 unjoint_samples 1200 joint_samples 2 [795046, 1046433]
processed_samples 1200 unjoint_samples 1200 joint_samples 2 [831441, 1048071]
processed_samples 1200 unjoint_samples 1200 joint_samples 2 [621470, 1046619]
processed_samples 1200 unjoint_samples 1200 joint_samples 2 [1045761, 993212]
processed_samples 1300 unjoint_samples 1300 joint_samples 3 [1047443, 243201]
processed_samples 1300 unjoint_samples 1300 joint_samples 3 [177992, 1048240]
processed_samples 1300 unjoint_samples 1300 joint_samples 3 [1046642, 134501]
processed_samples 1300 unjoint_samples 1300 joint_samples 3 [1033749, 109020]
processed_samples 1300 unjoint_samples 1300 joint_samples 2 [1047609, 852431]
processed_samples 1300 unjoint_samples 1300 joint_samples 2 [852078, 1046918]
processed_samples 1300 unjoint_samples 1300 joint_samples 2 [924001, 1046619]
processed_samples 1300 unjoint_samples 1300 joint_samples 2 [821455, 1035611]
processed_samples 1400 unjoint_samples 1400 joint_samples 3 [125020, 1030986]
processed_samples 1400 unjoint_samples 1400 joint_samples 3 [1046642, 453631]
processed_samples 1400 unjoint_samples 1400 joint_samples 3 [1034564, 221078]
processed_samples 1400 unjoint_samples 1400 joint_samples 3 [221505, 1046918]
processed_samples 1400 unjoint_samples 1400 joint_samples 3 [1047443, 598737]
processed_samples 1400 unjoint_samples 1400 joint_samples 3 [549978, 1048240]
processed_samples 1400 unjoint_samples 1400 joint_samples 3 [1033749, 499153]
processed_samples 1400 unjoint_samples 1400 joint_samples 2 [1047479, 1048318]
processed_samples 1500 unjoint_samples 1500 joint_samples 3 [1047479, 458879]
processed_samples 1500 unjoint_samples 1500 joint_samples 3 [1033749, 986256]
processed_samples 1500 unjoint_samples 1500 joint_samples 3 [896294, 1048240]
processed_samples 1500 unjoint_samples 1500 joint_samples 3 [1046642, 794093]
processed_samples 1500 unjoint_samples 1500 joint_samples 3 [468186, 1046918]
processed_samples 1500 unjoint_samples 1500 joint_samples 3 [398412, 1030986]
processed_samples 1500 unjoint_samples 1500 joint_samples 3 [1034564, 547949]
processed_samples 1500 unjoint_samples 1500 joint_samples 3 [1047443, 851156]
processed_samples 1600 unjoint_samples 1600 joint_samples 4 [1046642, 59914]
processed_samples 1600 unjoint_samples 1600 joint_samples 4 [131166, 1039676]
processed_samples 1600 unjoint_samples 1600 joint_samples 4 [1048073, 189561]
processed_samples 1600 unjoint_samples 1600 joint_samples 4 [235315, 1042397]
processed_samples 1600 unjoint_samples 1600 joint_samples 3 [1047479, 779403]
processed_samples 1600 unjoint_samples 1600 joint_samples 3 [707088, 1046918]
processed_samples 1600 unjoint_samples 1600 joint_samples 3 [721470, 1030986]
processed_samples 1600 unjoint_samples 1600 joint_samples 3 [1034564, 811640]
processed_samples 1700 unjoint_samples 1700 joint_samples 4 [1046642, 394741]
processed_samples 1700 unjoint_samples 1700 joint_samples 4 [554618, 1039676]
processed_samples 1700 unjoint_samples 1700 joint_samples 4 [524946, 1042397]
processed_samples 1700 unjoint_samples 1700 joint_samples 4 [1048073, 495943]
processed_samples 1700 unjoint_samples 1700 joint_samples 3 [1032392, 1046918]
processed_samples 1700 unjoint_samples 1700 joint_samples 4 [1036903, 106006]
processed_samples 1700 unjoint_samples 1700 joint_samples 4 [112922, 1034841]
processed_samples 1700 unjoint_samples 1700 joint_samples 3 [973142, 1030986]
0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d43cde40] mmco: unref short failure [h264 @ 0x55e9d43cde40] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure processed_samples 1800 unjoint_samples 1800 joint_samples 4 [302681, 1047230] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [302681, 1047230] [h264 @ 0x55e9d9362440] mmco: unref short failure processed_samples 1800 unjoint_samples 1800 joint_samples 4 [217977, 1045261] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [487474, 1034841] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [1036903, 387406] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [487474, 1034841] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [1046642, 665489] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [217977, 1045261] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [1036903, 387406] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [988528, 1039676] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [1046642, 665489] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [988528, 1039676] [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure processed_samples 1800 unjoint_samples 1800 joint_samples 4 [1048073, 839595] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [1048073, 839595] [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure processed_samples 1800 unjoint_samples 1800 joint_samples 4 [785664, 1042397] processed_samples 1800 unjoint_samples 1800 joint_samples 4 [785664, 1042397] [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 
0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aaff3800] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aaff3800] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3aa792140] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure processed_samples 1900 unjoint_samples 1900 joint_samples 5 [1045900, 192164] processed_samples 1900 unjoint_samples 1900 joint_samples 5 [1045900, 192164] processed_samples 1900 unjoint_samples 1900 joint_samples 4 [878180, 1034841] processed_samples 1900 unjoint_samples 1900 joint_samples 4 [878180, 1034841] processed_samples 1900 unjoint_samples 1900 joint_samples 5 [90508, 1042397] processed_samples 1900 unjoint_samples 1900 joint_samples 5 [90508, 1042397] processed_samples 1900 unjoint_samples 1900 
joint_samples 5 [66404, 1047909] processed_samples 1900 unjoint_samples 1900 joint_samples 5 [66404, 1047909] processed_samples 1900 unjoint_samples 1900 joint_samples 5 [1046681, 53017] processed_samples 1900 unjoint_samples 1900 joint_samples 5 [1046681, 53017] processed_samples 1900 unjoint_samples 1900 joint_samples 4 [713915, 1047230] processed_samples 1900 unjoint_samples 1900 joint_samples 4 [713915, 1047230] processed_samples 1900 unjoint_samples 1900 joint_samples 4 [628006, 1045261] processed_samples 1900 unjoint_samples 1900 joint_samples 4 [628006, 1045261] processed_samples 1900 unjoint_samples 1900 joint_samples 4 [1036903, 679574] processed_samples 1900 unjoint_samples 1900 joint_samples 4 [1036903, 679574] [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a8bed980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a8bed980] mmco: unref short failure [h264 @ 0x55f3a8bed980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 
processed_samples 2000 unjoint_samples 2000 joint_samples 5 [1045900, 492645]
processed_samples 2000 unjoint_samples 2000 joint_samples 5 [1047871, 99880]
processed_samples 2000 unjoint_samples 2000 joint_samples 5 [113655, 1047194]
processed_samples 2000 unjoint_samples 2000 joint_samples 5 [1046681, 421235]
processed_samples 2000 unjoint_samples 2000 joint_samples 5 [437413, 1042397]
processed_samples 2000 unjoint_samples 2000 joint_samples 5 [327280, 1047909]
processed_samples 2000 unjoint_samples 2000 joint_samples 5 [1036903, 25942]
processed_samples 2000 unjoint_samples 2000 joint_samples 4 [1011979, 1045261]
[h264 @ 0x…] mmco: unref short failure   (repeated many times; decoder addresses omitted)
processed_samples 2100 unjoint_samples 2100 joint_samples 5 [382504, 1047194]
processed_samples 2100 unjoint_samples 2100 joint_samples 5 [1046681, 746843]
processed_samples 2100 unjoint_samples 2100 joint_samples 5 [1036903, 433299]
processed_samples 2100 unjoint_samples 2100 joint_samples 5 [618149, 1047909]
processed_samples 2100 unjoint_samples 2100 joint_samples 5 [1047871, 425489]
processed_samples 2100 unjoint_samples 2100 joint_samples 5 [1025593, 388193]
processed_samples 2100 unjoint_samples 2100 joint_samples 5 [764313, 1042397]
processed_samples 2100 unjoint_samples 2100 joint_samples 5 [1045900, 844307]
[h264 @ 0x…] mmco: unref short failure   (repeated many times; decoder addresses omitted)
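The "[h264 @ 0x…] mmco: unref short failure" messages interleaved with the progress lines come from FFmpeg's H.264 decoder: "mmco" refers to the memory management control operations that track reference frames, and the warning usually points at imperfect reference handling in the source clip rather than a failure of the training job itself. If the node log needs to be read or diffed, a filter along the following lines can strip the noise; this is only a sketch, and the input/output filenames are assumptions rather than something taken from this run.

# Sketch only: drop the repeated libavcodec warnings so the data-loader
# progress records are easier to scan (filenames are assumptions).
sed 's/\[h264 @ 0x[0-9a-f]*\] mmco: unref short failure//g' log_node19.txt > log_node19.clean.txt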
processed_samples 2200 unjoint_samples 2200 joint_samples 6 [70882, 1029265]
processed_samples 2200 unjoint_samples 2200 joint_samples 5 [1047871, 746187]
processed_samples 2200 unjoint_samples 2200 joint_samples 5 [1036903, 732500]
processed_samples 2200 unjoint_samples 2200 joint_samples 6 [1019411, 141357]
processed_samples 2200 unjoint_samples 2200 joint_samples 5 [690893, 1047194]
processed_samples 2200 unjoint_samples 2200 joint_samples 6 [217835, 1035969]
processed_samples 2200 unjoint_samples 2200 joint_samples 5 [1025593, 727181]
processed_samples 2200 unjoint_samples 2200 joint_samples 5 [922247, 1047909]
[h264 @ 0x…] mmco: unref short failure   (repeated many times; decoder addresses omitted)
processed_samples 2300 unjoint_samples 2300 joint_samples 6 [1047794, 205817]
processed_samples 2300 unjoint_samples 2300 joint_samples 6 [516171, 1029265]
processed_samples 2300 unjoint_samples 2300 joint_samples 6 [1042887, 52989]
processed_samples 2300 unjoint_samples 2300 joint_samples 6 [1019411, 577040]
processed_samples 2300 unjoint_samples 2300 joint_samples 5 [1047871, 989496]
processed_samples 2300 unjoint_samples 2300 joint_samples 6 [512373, 1035969]
processed_samples 2300 unjoint_samples 2300 joint_samples 5 [969748, 1047194]
processed_samples 2300 unjoint_samples 2300 joint_samples 6 [409, 1048422]
[h264 @ 0x…] mmco: unref short failure   (repeated many times; decoder addresses omitted)
processed_samples 2400 unjoint_samples 2400 joint_samples 6 [202958, 1047194]
processed_samples 2400 unjoint_samples 2400 joint_samples 6 [1047871, 272112]
processed_samples 2400 unjoint_samples 2400 joint_samples 7 [83980, 1021551]
processed_samples 2400 unjoint_samples 2400 joint_samples 6 [345292, 1048422]
processed_samples 2400 unjoint_samples 2400 joint_samples 6 [1042887, 458137]
processed_samples 2400 unjoint_samples 2400 joint_samples 6 [951455, 1029265]
processed_samples 2400 unjoint_samples 2400 joint_samples 6 [840968, 1035969]
processed_samples 2400 unjoint_samples 2400 joint_samples 6 [1047794, 480238]
[h264 @ 0x…] mmco: unref short failure   (repeated many times; decoder addresses omitted)
processed_samples 2500 unjoint_samples 2500 joint_samples 7 [343159, 1021551]
processed_samples 2500 unjoint_samples 2500 joint_samples 7 [1022012, 149298]
processed_samples 2500 unjoint_samples 2500 joint_samples 7 [1047670, 268192]
processed_samples 2500 unjoint_samples 2500 joint_samples 6 [517332, 1047194]
processed_samples 2500 unjoint_samples 2500 joint_samples 6 [1042887, 707380]
processed_samples 2500 unjoint_samples 2500 joint_samples 6 [1047871, 611625]
processed_samples 2500 unjoint_samples 2500 joint_samples 6 [1047794, 851870]
processed_samples 2500 unjoint_samples 2500 joint_samples 6 [832090, 1048422]
[h264 @ 0x…] mmco: unref short failure   (repeated many times; decoder addresses omitted)
short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure processed_samples 2600 unjoint_samples 2600 joint_samples 7 [1022012, 455681] processed_samples 2600 unjoint_samples 2600 joint_samples 7 [1022012, 455681] processed_samples 2600 unjoint_samples 2600 joint_samples 7 [190504, 1013517] processed_samples 2600 unjoint_samples 2600 joint_samples 6 [835759, 1047194] processed_samples 2600 unjoint_samples 2600 joint_samples 7 [190504, 1013517] [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure processed_samples 2600 unjoint_samples 2600 joint_samples 7 [1047670, 661896] [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure processed_samples 2600 unjoint_samples 2600 joint_samples 6 [835759, 1047194] processed_samples 2600 unjoint_samples 2600 joint_samples 7 [642285, 1021551] processed_samples 2600 unjoint_samples 2600 joint_samples 7 [1047670, 661896] processed_samples 2600 unjoint_samples 2600 joint_samples 7 [642285, 1021551] processed_samples 2600 unjoint_samples 2600 joint_samples 7 [1028376, 192715] processed_samples 2600 unjoint_samples 2600 joint_samples 7 [1028376, 192715] processed_samples 2600 unjoint_samples 2600 joint_samples 6 [1047871, 869587] processed_samples 2600 unjoint_samples 2600 joint_samples 6 [1047871, 869587] processed_samples 2600 unjoint_samples 2600 joint_samples 6 [1042887, 969916] processed_samples 2600 unjoint_samples 2600 joint_samples 6 [1042887, 969916] [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure [h264 @ 0x55f3a8bed980] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d378a740] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short 
failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure processed_samples 2700 unjoint_samples 2700 joint_samples 7 [94413, 1041391] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [94413, 1041391] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [1046684, 93270] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [1046684, 93270] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [952613, 1021551] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [952613, 1021551] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [276408, 1030298] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [276408, 1030298] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [1028376, 487435] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [1028376, 487435] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [591606, 1013517] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [591606, 1013517] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [1022012, 719491] processed_samples 2700 unjoint_samples 2700 joint_samples 7 [1022012, 719491] processed_samples 2700 unjoint_samples 2700 joint_samples 8 [1047670, 2105] processed_samples 2700 unjoint_samples 2700 joint_samples 8 [1047670, 2105] [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 
processed_samples 2800 unjoint_samples 2800 joint_samples 8 [1045554, 135897]
processed_samples 2800 unjoint_samples 2800 joint_samples 8 [1023933, 138893]
processed_samples 2800 unjoint_samples 2800 joint_samples 7 [330644, 1041391]
processed_samples 2800 unjoint_samples 2800 joint_samples 7 [616910, 1030298]
processed_samples 2800 unjoint_samples 2800 joint_samples 8 [1047670, 285080]
processed_samples 2800 unjoint_samples 2800 joint_samples 7 [1028376, 812561]
processed_samples 2800 unjoint_samples 2800 joint_samples 7 [896670, 1013517]
processed_samples 2800 unjoint_samples 2800 joint_samples 7 [1046684, 463131]
processed_samples 2900 unjoint_samples 2900 joint_samples 7 [626955, 1041391]
processed_samples 2900 unjoint_samples 2900 joint_samples 8 [1045554, 439025]
processed_samples 2900 unjoint_samples 2900 joint_samples 8 [1047299, 130492]
processed_samples 2900 unjoint_samples 2900 joint_samples 8 [1023933, 360868]
processed_samples 2900 unjoint_samples 2900 joint_samples 8 [1047670, 690009]
processed_samples 2900 unjoint_samples 2900 joint_samples 8 [1046684, 2105]
processed_samples 2900 unjoint_samples 2900 joint_samples 7 [1028376, 1018352]
processed_samples 2900 unjoint_samples 2900 joint_samples 7 [997427, 1030298]
processed_samples 3000 unjoint_samples 3000 joint_samples 8 [1046684, 494756]
processed_samples 3000 unjoint_samples 3000 joint_samples 9 [38718, 1047007]
processed_samples 3000 unjoint_samples 3000 joint_samples 7 [949141, 1041391]
processed_samples 3000 unjoint_samples 3000 joint_samples 8 [1023933, 765511]
processed_samples 3000 unjoint_samples 3000 joint_samples 8 [1038818, 264412]
processed_samples 3000 unjoint_samples 3000 joint_samples 8 [1047299, 371044]
processed_samples 3000 unjoint_samples 3000 joint_samples 8 [1037652, 291833]
processed_samples 3000 unjoint_samples 3000 joint_samples 8 [1045554, 831562]
processed_samples 3100 unjoint_samples 3100 joint_samples 9 [1047859, 90547]
processed_samples 3100 unjoint_samples 3100 joint_samples 8 [1047208, 169060]
processed_samples 3100 unjoint_samples 3100 joint_samples 9 [320373, 1047007]
processed_samples 3100 unjoint_samples 3100 joint_samples 8 [1046684, 790416]
processed_samples 3100 unjoint_samples 3100 joint_samples 9 [1034302, 27753]
processed_samples 3100 unjoint_samples 3100 joint_samples 8 [1038818, 608690]
processed_samples 3100 unjoint_samples 3100 joint_samples 8 [1037652, 570636]
processed_samples 3100 unjoint_samples 3100 joint_samples 8 [1047299, 674367]
processed_samples 3200 unjoint_samples 3200 joint_samples 9 [1047859, 345803]
processed_samples 3200 unjoint_samples 3200 joint_samples 9 [64799, 1022825]
processed_samples 3200 unjoint_samples 3200 joint_samples 9 [1034302, 258447]
processed_samples 3200 unjoint_samples 3200 joint_samples 8 [1047208, 508101]
processed_samples 3200 unjoint_samples 3200 joint_samples 9 [554460, 1047007]
processed_samples 3200 unjoint_samples 3200 joint_samples 8 [1037652, 919074]
processed_samples 3200 unjoint_samples 3200 joint_samples 8 [1038818, 972727]
processed_samples 3200 unjoint_samples 3200 joint_samples 8 [1047299, 999512]
processed_samples 3300 unjoint_samples 3300 joint_samples 9 [424985, 1022825]
processed_samples 3300 unjoint_samples 3300 joint_samples 9 [1047299, 252052]
processed_samples 3300 unjoint_samples 3300 joint_samples 9 [245880, 1028292]
processed_samples 3300 unjoint_samples 3300 joint_samples 9 [1047859, 677635]
processed_samples 3300 unjoint_samples 3300 joint_samples 9 [1045918, 181744]
processed_samples 3300 unjoint_samples 3300 joint_samples 8 [1047208, 739224]
processed_samples 3300 unjoint_samples 3300 joint_samples 9 [1034302, 553830]
processed_samples 3300 unjoint_samples 3300 joint_samples 9 [808330, 1047007]
processed_samples 3400 unjoint_samples 3400 joint_samples 9 [658742, 1022825]
processed_samples 3400 unjoint_samples 3400 joint_samples 9 [564927, 1028292]
processed_samples 3400 unjoint_samples 3400 joint_samples 9 [1047859, 992468]
processed_samples 3400 unjoint_samples 3400 joint_samples 9 [1045918, 565181]
processed_samples 3400 unjoint_samples 3400 joint_samples 10 [57179, 1047007]
processed_samples 3400 unjoint_samples 3400 joint_samples 8 [1047208, 1029599]
processed_samples 3400 unjoint_samples 3400 joint_samples 9 [1047299, 487223]
processed_samples 3400 unjoint_samples 3400 joint_samples 9 [1034302, 993952]
processed_samples 3500 unjoint_samples 3500 joint_samples 10 [302892, 1047007]
processed_samples 3500 unjoint_samples 3500 joint_samples 10 [1047859, 260642]
processed_samples 3500 unjoint_samples 3500 joint_samples 9 [244260, 1046177]
processed_samples 3500 unjoint_samples 3500 joint_samples 10 [213860, 1047635]
processed_samples 3500 unjoint_samples 3500 joint_samples 9 [955276, 1028292]
processed_samples 3500 unjoint_samples 3500 joint_samples 9 [872748, 1022825]
processed_samples 3500 unjoint_samples 3500 joint_samples 9 [1047299, 802311]
processed_samples 3500 unjoint_samples 3500 joint_samples 9 [1045918, 888214]
processed_samples 3600 unjoint_samples 3600 joint_samples 10 [203115, 1036443]
processed_samples 3600 unjoint_samples 3600 joint_samples 10 [1047859, 537476]
processed_samples 3600 unjoint_samples 3600 joint_samples 10 [142291, 1047633]
processed_samples 3600 unjoint_samples 3600 joint_samples 10 [117376, 1046740]
processed_samples 3600 unjoint_samples 3600 joint_samples 10 [1048220, 191278]
processed_samples 3600 unjoint_samples 3600 joint_samples 10 [475001, 1047635]
processed_samples 3600 unjoint_samples 3600 joint_samples 10 [652210, 1047007]
processed_samples 3600 unjoint_samples 3600 joint_samples 9 [725981, 1046177]
processed_samples 3700 unjoint_samples 3700 joint_samples 10 [566356, 1036443]
processed_samples 3700 unjoint_samples 3700 joint_samples 10 [1047317, 4690]
processed_samples 3700 unjoint_samples 3700 joint_samples 10 [889919, 1047007]
processed_samples 3700 unjoint_samples 3700 joint_samples 10 [418898, 1047633]
processed_samples 3700 unjoint_samples 3700 joint_samples 10 [346865, 1046740]
processed_samples 3700 unjoint_samples 3700 joint_samples 10 [1048220, 479972]
processed_samples 3700 unjoint_samples 3700 joint_samples 10 [1047859, 930216]
processed_samples 3700 unjoint_samples 3700 joint_samples 10 [771623, 1047635]
processed_samples 3800 unjoint_samples 3800 joint_samples 11 [168798, 1046394]
processed_samples 3800 unjoint_samples 3800 joint_samples 10 [901237, 1036443]
processed_samples 3800 unjoint_samples 3800 joint_samples 10 [1047317, 339617]
processed_samples 3800 unjoint_samples 3800 joint_samples 11 [84618, 1047007]
processed_samples 3800 unjoint_samples 3800 joint_samples 11 [1047053, 84197]
processed_samples 3800 unjoint_samples 3800 joint_samples 10 [1048220, 928635]
processed_samples 3800 unjoint_samples 3800 joint_samples 10 [599885, 1046740]
processed_samples 3800 unjoint_samples 3800 joint_samples 10 [727425, 1047633]
mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure processed_samples 3900 unjoint_samples 3900 joint_samples 11 [1048270, 79820] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [1048270, 79820] processed_samples 3900 unjoint_samples 3900 joint_samples 10 [1047317, 818374] processed_samples 3900 unjoint_samples 3900 joint_samples 10 [1047317, 818374] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [479089, 1046394] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [1047053, 553016] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [479089, 1046394] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [1047053, 553016] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [1046360, 33466] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [359383, 1047007] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [359383, 1047007] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [1046360, 33466] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [190579, 1046880] processed_samples 3900 unjoint_samples 3900 joint_samples 11 [190579, 1046880] processed_samples 3900 unjoint_samples 3900 joint_samples 10 [868551, 1046740] processed_samples 3900 unjoint_samples 3900 joint_samples 10 [868551, 1046740] [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3aac3b340] Missing reference picture, default is 65530 [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d34c28c0] Missing reference picture, default is 65530 [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: 
unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d0b25200] mmco: unref short failure [h264 @ 0x55e9d0b25200] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a6a72040] mmco: unref short failure [h264 @ 0x55f3a6a72040] mmco: unref short failure [h264 @ 0x55f3a6a72040] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d2636880] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: 
unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure processed_samples 4000 unjoint_samples 4000 joint_samples 11 [633124, 1047007] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [633124, 1047007] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [1048270, 446246] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [1047317, 66952] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [1047317, 66952] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [1048270, 446246] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [1046360, 336397] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [1046360, 336397] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [1040493, 164696] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [1040493, 164696] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [535373, 1046880] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [535373, 1046880] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [744241, 1046394] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [1047053, 760087] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [744241, 1046394] processed_samples 4000 unjoint_samples 4000 joint_samples 11 [1047053, 760087] [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 
0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d378a740] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d1152040] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d1152040] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d1152040] mmco: unref short failure [h264 @ 0x55e9d1152040] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d4f56680] mmco: unref short failure [h264 @ 0x55e9d4f56680] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d4f56680] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 
0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3aa201540] mmco: unref short failure [h264 @ 0x55f3aa201540] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55e9d34cfd80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure processed_samples 4100 unjoint_samples 4100 joint_samples 11 [1047317, 374195] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [1047317, 374195] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [1048270, 702478] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [1048270, 702478] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [1046360, 639351] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [1046360, 639351] [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure processed_samples 4100 unjoint_samples 4100 joint_samples 11 [1040493, 445831] processed_samples 4100 unjoint_samples 4100 joint_samples 12 [1047053, 130959] processed_samples 4100 unjoint_samples 4100 joint_samples 12 [1047053, 130959] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [1040493, 445831] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [803953, 1046880] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [803953, 1046880] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [944059, 1047007] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [944059, 1047007] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [1046050, 1046394] processed_samples 4100 unjoint_samples 4100 joint_samples 11 [1046050, 1046394] [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d7323280] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d7323280] mmco: unref short failure [h264 @ 0x55e9d7323280] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short 
failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3ae983680] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure processed_samples 4200 unjoint_samples 4200 joint_samples 11 [1047317, 708659] processed_samples 4200 unjoint_samples 4200 joint_samples 11 [1047317, 708659] processed_samples 4200 unjoint_samples 4200 joint_samples 12 [1047053, 391152] processed_samples 4200 unjoint_samples 4200 joint_samples 12 [1047053, 391152] processed_samples 4200 unjoint_samples 4200 joint_samples 12 [1048308, 231088] processed_samples 4200 unjoint_samples 4200 joint_samples 12 [1048308, 231088] processed_samples 4200 unjoint_samples 4200 joint_samples 12 [1046752, 222650] processed_samples 4200 unjoint_samples 4200 joint_samples 12 [1046752, 222650] processed_samples 4200 unjoint_samples 4200 joint_samples 12 [156161, 1046880] processed_samples 4200 unjoint_samples 4200 joint_samples 12 [156161, 1046880] 
processed_samples 4200 unjoint_samples 4200 joint_samples 11 [1048270, 993609] processed_samples 4200 unjoint_samples 4200 joint_samples 11 [1048270, 993609] processed_samples 4200 unjoint_samples 4200 joint_samples 11 [1040493, 871021] processed_samples 4200 unjoint_samples 4200 joint_samples 11 [1040493, 871021] processed_samples 4200 unjoint_samples 4200 joint_samples 11 [1046360, 1026946] processed_samples 4200 unjoint_samples 4200 joint_samples 11 [1046360, 1026946] [h264 @ 0x55f3aaf595c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d4663280] mmco: unref short failure [h264 @ 0x55f3aaf595c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d12f0000] 
mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure processed_samples 4300 unjoint_samples 4300 joint_samples 12 [293724, 1038872] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [293724, 1038872] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [593962, 1046880] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [593962, 1046880] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [1046752, 532179] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [1046752, 532179] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [226308, 1047345] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [226308, 1047345] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [133489, 1046544] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [133489, 1046544] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [1047053, 742270] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [1047053, 742270] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [1048308, 587503] processed_samples 4300 unjoint_samples 4300 joint_samples 12 [1048308, 587503] processed_samples 4300 unjoint_samples 4300 joint_samples 11 [1047317, 1014692] processed_samples 4300 unjoint_samples 4300 joint_samples 11 [1047317, 1014692] [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 
0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d4fa9080] mmco: unref short failure [h264 @ 0x55e9d4fa9080] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d1152040] mmco: unref short failure [h264 @ 0x55e9d1152040] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d4fa9080] mmco: unref short failure [h264 @ 0x55e9d4fa9080] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 
0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3ad7ec440] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3ad7ec440] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure processed_samples 4400 unjoint_samples 4400 joint_samples 13 [276471, 940277] processed_samples 4400 unjoint_samples 4400 joint_samples 13 [276471, 940277] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [248540, 1035523] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [248540, 1035523] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [446934, 1046544] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [446934, 1046544] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [559755, 1038872] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [559755, 1038872] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [509125, 1047345] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [509125, 1047345] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [846201, 1046880] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [846201, 1046880] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [1046752, 809433] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [1046752, 809433] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [1048308, 905462] processed_samples 4400 unjoint_samples 4400 joint_samples 12 [1048308, 905462] [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure 
[h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3aaff3800] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d43cde40] mmco: unref short failure [h264 @ 0x55e9d43cde40] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d5342c40] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3ae983680] mmco: unref short failure [h264 @ 0x55f3ae983680] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure 
[h264 @ 0x55f3ae983680] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure processed_samples 4500 unjoint_samples 4500 joint_samples 13 [1046752, 42100] [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure processed_samples 4500 unjoint_samples 4500 joint_samples 13 [1046752, 42100] processed_samples 4500 unjoint_samples 4500 joint_samples 13 [106985, 1046830] processed_samples 4500 unjoint_samples 4500 joint_samples 13 [106985, 1046830] [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure processed_samples 4500 unjoint_samples 4500 joint_samples 13 [1039320, 201329] processed_samples 4500 unjoint_samples 4500 joint_samples 13 [1039320, 201329] processed_samples 4500 unjoint_samples 4500 joint_samples 13 [618743, 940277] processed_samples 4500 unjoint_samples 4500 joint_samples 12 [792545, 1047345] processed_samples 4500 unjoint_samples 4500 joint_samples 13 [618743, 940277] processed_samples 4500 unjoint_samples 4500 joint_samples 12 [792545, 1047345] processed_samples 4500 unjoint_samples 4500 joint_samples 12 [646486, 1035523] processed_samples 4500 unjoint_samples 4500 joint_samples 12 [646486, 1035523] processed_samples 4500 unjoint_samples 4500 joint_samples 12 [873867, 1038872] processed_samples 4500 unjoint_samples 4500 joint_samples 12 [873867, 1038872] processed_samples 4500 unjoint_samples 4500 joint_samples 12 [721940, 1046544] processed_samples 4500 unjoint_samples 4500 joint_samples 12 [721940, 1046544] [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d09bb780] mmco: unref short failure [h264 @ 0x55e9d09bb780] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short 
failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure processed_samples 4600 unjoint_samples 4600 joint_samples 13 [1045854, 101368] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [1045854, 101368] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [1026136, 65950] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [1026136, 65950] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [1046286, 15739] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [1046286, 15739] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [449319, 1046830] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [449319, 1046830] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [898165, 940277] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [898165, 940277] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [1046752, 417451] processed_samples 4600 
unjoint_samples 4600 joint_samples 13 [1046752, 417451] processed_samples 4600 unjoint_samples 4600 joint_samples 12 [875933, 1035523] processed_samples 4600 unjoint_samples 4600 joint_samples 12 [875933, 1035523] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [1039320, 564780] processed_samples 4600 unjoint_samples 4600 joint_samples 13 [1039320, 564780] [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d378a740] mmco: unref short failure [h264 @ 0x55e9d378a740] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d09bb780] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: 
unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3aa792140] mmco: unref short failure [h264 @ 0x55f3aa792140] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d09bb780] mmco: unref short failure [h264 @ 0x55e9d09bb780] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9c65f7380] mmco: unref short failure [h264 @ 0x55e9c65f7380] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1044901, 163318] [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure processed_samples 4700 unjoint_samples 4700 
joint_samples 13 [1044901, 163318] processed_samples 4700 unjoint_samples 4700 joint_samples 14 [1047545, 105874] processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1026136, 413498] processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1046286, 262649] processed_samples 4700 unjoint_samples 4700 joint_samples 14 [1047545, 105874] processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1026136, 413498] processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1046286, 262649] processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1045854, 346464] processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1045854, 346464] [h264 @ 0x55f3a7e93bc0] mmco: unref short failure processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1046752, 703194] [h264 @ 0x55e9d0725780] mmco: unref short failure processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1046752, 703194] processed_samples 4700 unjoint_samples 4700 joint_samples 13 [785028, 1046830] processed_samples 4700 unjoint_samples 4700 joint_samples 13 [785028, 1046830] processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1039320, 837541] processed_samples 4700 unjoint_samples 4700 joint_samples 13 [1039320, 837541] [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d2636880] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 
0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a67ce700] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a67ce700] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure processed_samples 4800 unjoint_samples 4800 joint_samples 13 [1044901, 465940] processed_samples 4800 unjoint_samples 4800 joint_samples 13 [1044901, 465940] processed_samples 4800 unjoint_samples 4800 joint_samples 13 [1045854, 620175] processed_samples 4800 unjoint_samples 4800 joint_samples 13 [1045854, 620175] processed_samples 4800 unjoint_samples 4800 joint_samples 14 [14862, 1046830] processed_samples 4800 unjoint_samples 4800 joint_samples 14 [14862, 1046830] processed_samples 4800 unjoint_samples 4800 joint_samples 14 [1047545, 488451] processed_samples 4800 unjoint_samples 4800 joint_samples 14 [1047545, 488451] [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure processed_samples 4800 unjoint_samples 4800 joint_samples 14 [137412, 1026676] processed_samples 4800 unjoint_samples 4800 joint_samples 14 [137412, 1026676] processed_samples 4800 unjoint_samples 4800 joint_samples 13 [1046752, 969954] processed_samples 4800 unjoint_samples 4800 joint_samples 13 [1026136, 718369] processed_samples 4800 unjoint_samples 4800 joint_samples 13 [1046752, 969954] processed_samples 4800 unjoint_samples 4800 joint_samples 13 [1026136, 718369] processed_samples 4800 unjoint_samples 4800 joint_samples 13 [1046286, 489684] processed_samples 4800 unjoint_samples 4800 joint_samples 13 [1046286, 489684] [h264 @ 0x55f3aa792140] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a6cbdb00] mmco: unref short failure [h264 @ 0x55f3a6cbdb00] mmco: unref short failure 
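The "mmco: unref short failure" messages are emitted by FFmpeg's libavcodec H.264 decoder while the data loaders decode video clips: a memory-management control operation asked the decoder to unreference a short-term reference picture it could not find, which typically happens with truncated or imperfect H.264 streams and is generally non-fatal. As a minimal sketch for reading the log itself, assuming a copy of this node log has been saved locally (the path log_node19.txt below is a placeholder, not taken from the run), the warnings can be stripped so only the progress counters remain:

    import re
    import sys

    # Placeholder path for a locally saved copy of this node log (assumption, not from the run).
    LOG_PATH = "log_node19.txt"

    # Matches the repeated libavcodec h264 warning,
    # e.g. "[h264 @ 0x55e9d473cd80] mmco: unref short failure".
    H264_WARNING = re.compile(r"\[h264 @ 0x[0-9a-f]+\] mmco: unref short failure")

    def strip_decoder_warnings(path: str) -> None:
        """Print the log with the repeated h264 decoder warnings removed."""
        with open(path, "r", errors="replace") as fh:
            for line in fh:
                cleaned = H264_WARNING.sub("", line).strip()
                if cleaned:  # skip lines that contained nothing but warnings
                    print(cleaned)

    if __name__ == "__main__":
        strip_decoder_warnings(sys.argv[1] if len(sys.argv) > 1 else LOG_PATH)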
processed_samples 4900 unjoint_samples 4900 joint_samples 14 [408599, 1034949]
processed_samples 4900 unjoint_samples 4900 joint_samples 14 [46855, 1028241]
processed_samples 4900 unjoint_samples 4900 joint_samples 14 [336496, 1046830]
processed_samples 4900 unjoint_samples 4900 joint_samples 14 [442281, 1026676]
processed_samples 4900 unjoint_samples 4900 joint_samples 13 [1045854, 927327]
processed_samples 4900 unjoint_samples 4900 joint_samples 13 [1044901, 786394]
processed_samples 4900 unjoint_samples 4900 joint_samples 13 [1046286, 804425]
processed_samples 4900 unjoint_samples 4900 joint_samples 14 [1047545, 890573]
processed_samples 5000 unjoint_samples 5000 joint_samples 14 [1045854, 228540]
processed_samples 5000 unjoint_samples 5000 joint_samples 14 [32309, 1046158]
processed_samples 5000 unjoint_samples 5000 joint_samples 14 [132682, 1048130]
processed_samples 5000 unjoint_samples 5000 joint_samples 15 [172895, 1037135]
processed_samples 5000 unjoint_samples 5000 joint_samples 14 [683189, 1034949]
processed_samples 5000 unjoint_samples 5000 joint_samples 14 [364610, 1028241]
processed_samples 5000 unjoint_samples 5000 joint_samples 14 [790255, 1026676]
processed_samples 5000 unjoint_samples 5000 joint_samples 14 [759046, 1046830]
processed_samples 5100 unjoint_samples 5100 joint_samples 15 [505522, 1037135]
processed_samples 5100 unjoint_samples 5100 joint_samples 14 [346794, 1046158]
processed_samples 5100 unjoint_samples 5100 joint_samples 14 [997138, 1046830]
processed_samples 5100 unjoint_samples 5100 joint_samples 14 [1045854, 514057]
processed_samples 5100 unjoint_samples 5100 joint_samples 15 [1004867, 306129]
processed_samples 5100 unjoint_samples 5100 joint_samples 14 [1020136, 1034949]
processed_samples 5100 unjoint_samples 5100 joint_samples 14 [413419, 1048130]
processed_samples 5100 unjoint_samples 5100 joint_samples 14 [691415, 1028241]
processed_samples 5200 unjoint_samples 5200 joint_samples 15 [36429, 1030285]
processed_samples 5200 unjoint_samples 5200 joint_samples 15 [905671, 1037135]
processed_samples 5200 unjoint_samples 5200 joint_samples 14 [1045854, 825340]
processed_samples 5200 unjoint_samples 5200 joint_samples 15 [1046495, 277857]
processed_samples 5200 unjoint_samples 5200 joint_samples 15 [242395, 1034992]
processed_samples 5200 unjoint_samples 5200 joint_samples 14 [792799, 1048130]
processed_samples 5200 unjoint_samples 5200 joint_samples 14 [721235, 1046158]
processed_samples 5200 unjoint_samples 5200 joint_samples 15 [1004867, 652358]
processed_samples 5300 unjoint_samples 5300 joint_samples 15 [1044591, 46332]
processed_samples 5300 unjoint_samples 5300 joint_samples 16 [180357, 1046290]
processed_samples 5300 unjoint_samples 5300 joint_samples 15 [122343, 1046571]
processed_samples 5300 unjoint_samples 5300 joint_samples 14 [954891, 1046158]
processed_samples 5300 unjoint_samples 5300 joint_samples 15 [1046495, 504078]
processed_samples 5300 unjoint_samples 5300 joint_samples 15 [700470, 1034992]
processed_samples 5300 unjoint_samples 5300 joint_samples 15 [420391, 1030285]
processed_samples 5300 unjoint_samples 5300 joint_samples 15 [1004867, 971188]
processed_samples 5400 unjoint_samples 5400 joint_samples 16 [1036677, 84115]
processed_samples 5400 unjoint_samples 5400 joint_samples 15 [1046997, 160845]
processed_samples 5400 unjoint_samples 5400 joint_samples 15 [1046495, 822018]
processed_samples 5400 unjoint_samples 5400 joint_samples 15 [440424, 1046571]
processed_samples 5400 unjoint_samples 5400 joint_samples 16 [1048210, 205849]
processed_samples 5400 unjoint_samples 5400 joint_samples 15 [1044591, 588017]
processed_samples 5400 unjoint_samples 5400 joint_samples 16 [508517, 1046290]
processed_samples 5400 unjoint_samples 5400 joint_samples 15 [727189, 1030285]
processed_samples 5500 unjoint_samples 5500 joint_samples 16 [1046495, 69746]
processed_samples 5500 unjoint_samples 5500 joint_samples 16 [1036677, 426424]
processed_samples 5500 unjoint_samples 5500 joint_samples 16 [821833, 1046290]
processed_samples 5500 unjoint_samples 5500 joint_samples 15 [712158, 1046571]
processed_samples 5500 unjoint_samples 5500 joint_samples 15 [1046997, 514266]
processed_samples 5500 unjoint_samples 5500 joint_samples 16 [1048210, 445343]
processed_samples 5500 unjoint_samples 5500 joint_samples 15 [994474, 1030285]
processed_samples 5500 unjoint_samples 5500 joint_samples 15 [1044591, 859853]
processed_samples 5600 unjoint_samples 5600 joint_samples 16 [1046495, 323455]
processed_samples 5600 unjoint_samples 5600 joint_samples 16 [68416, 1043613]
processed_samples 5600 unjoint_samples 5600 joint_samples 17 [1040579, 72238]
processed_samples 5600 unjoint_samples 5600 joint_samples 16 [273630, 1046986]
processed_samples 5600 unjoint_samples 5600 joint_samples 16 [1036677, 755596]
processed_samples 5600 unjoint_samples 5600 joint_samples 16 [1048210, 799146]
processed_samples 5600 unjoint_samples 5600 joint_samples 15 [1046997, 951410]
processed_samples 5600 unjoint_samples 5600 joint_samples 15 [991865, 1046571]
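Each progress counter follows the fixed pattern "processed_samples N unjoint_samples N joint_samples K [a, b]", and each record is printed twice in the raw log, so throughput is easiest to follow after parsing and deduplicating the records. A minimal sketch with the Python standard library, again assuming a locally saved copy of the log (the path below is illustrative, not part of the training code):

    import re
    from collections import OrderedDict

    # Placeholder path for a locally saved copy of this node log (assumption, not from the run).
    LOG_PATH = "log_node19.txt"

    # Matches e.g. "processed_samples 5600 unjoint_samples 5600 joint_samples 16 [68416, 1043613]".
    PROGRESS = re.compile(
        r"processed_samples (\d+) unjoint_samples (\d+) joint_samples (\d+) \[(\d+), (\d+)\]"
    )

    def parse_progress(path: str):
        """Return unique (processed, unjoint, joint, counter_a, counter_b) tuples in log order."""
        seen = OrderedDict()
        with open(path, "r", errors="replace") as fh:
            for line in fh:
                for match in PROGRESS.finditer(line):
                    record = tuple(int(g) for g in match.groups())
                    seen.setdefault(record, None)  # each record is printed twice; keep the first
        return list(seen)

    if __name__ == "__main__":
        for record in parse_progress(LOG_PATH):
            print(record)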
processed_samples 5700 unjoint_samples 5700 joint_samples 17 [31083, 1039313]
processed_samples 5700 unjoint_samples 5700 joint_samples 17 [1043358, 87134]
processed_samples 5700 unjoint_samples 5700 joint_samples 16 [1035130, 339677]
processed_samples 5700 unjoint_samples 5700 joint_samples 16 [284251, 1043613]
processed_samples 5700 unjoint_samples 5700 joint_samples 16 [1046495, 655505]
processed_samples 5700 unjoint_samples 5700 joint_samples 16 [589568, 1046986]
processed_samples 5700 unjoint_samples 5700 joint_samples 17 [1040579, 415120]
processed_samples 5700 unjoint_samples 5700 joint_samples 16 [1046997, 222886]
processed_samples 5800 unjoint_samples 5800 joint_samples 17 [1040579, 644242]
processed_samples 5800 unjoint_samples 5800 joint_samples 17 [1043358, 400574]
processed_samples 5800 unjoint_samples 5800 joint_samples 16 [1035130, 642825]
processed_samples 5800 unjoint_samples 5800 joint_samples 16 [1046997, 483571]
processed_samples 5800 unjoint_samples 5800 joint_samples 17 [300449, 1039313]
processed_samples 5800 unjoint_samples 5800 joint_samples 16 [601667, 1043613]
processed_samples 5800 unjoint_samples 5800 joint_samples 16 [979643, 1046986]
processed_samples 5800 unjoint_samples 5800 joint_samples 16 [1046495, 965484]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 5900 unjoint_samples 5900 joint_samples 17 [1043358, 735701]
processed_samples 5900 unjoint_samples 5900 joint_samples 17 [1015055, 245710]
processed_samples 5900 unjoint_samples 5900 joint_samples 16 [1041469, 1043613]
processed_samples 5900 unjoint_samples 5900 joint_samples 17 [432845, 981599]
processed_samples 5900 unjoint_samples 5900 joint_samples 16 [1046997, 739495]
processed_samples 5900 unjoint_samples 5900 joint_samples 17 [632961, 1039313]
processed_samples 5900 unjoint_samples 5900 joint_samples 16 [1035130, 1008763]
processed_samples 5900 unjoint_samples 5900 joint_samples 17 [1040579, 988809]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 6000 unjoint_samples 6000 joint_samples 17 [1035130, 326437]
processed_samples 6000 unjoint_samples 6000 joint_samples 17 [1015055, 632544]
processed_samples 6000 unjoint_samples 6000 joint_samples 18 [1046346, 215542]
processed_samples 6000 unjoint_samples 6000 joint_samples 17 [1046304, 378357]
processed_samples 6000 unjoint_samples 6000 joint_samples 17 [862474, 981599]
processed_samples 6000 unjoint_samples 6000 joint_samples 17 [888957, 1039313]
processed_samples 6000 unjoint_samples 6000 joint_samples 16 [1046997, 1029163]
processed_samples 6000 unjoint_samples 6000 joint_samples 18 [44031, 1010450]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 6100 unjoint_samples 6100 joint_samples 18 [88024, 1033912]
processed_samples 6100 unjoint_samples 6100 joint_samples 17 [253817, 1044368]
processed_samples 6100 unjoint_samples 6100 joint_samples 18 [322761, 1010450]
processed_samples 6100 unjoint_samples 6100 joint_samples 18 [1046346, 491149]
processed_samples 6100 unjoint_samples 6100 joint_samples 17 [1035130, 611308]
processed_samples 6100 unjoint_samples 6100 joint_samples 17 [1046304, 863797]
processed_samples 6100 unjoint_samples 6100 joint_samples 18 [125216, 1046544]
processed_samples 6100 unjoint_samples 6100 joint_samples 17 [1015055, 900699]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 6200 unjoint_samples 6200 joint_samples 17 [533061, 1044368]
processed_samples 6200 unjoint_samples 6200 joint_samples 18 [655515, 1010450]
processed_samples 6200 unjoint_samples 6200 joint_samples 18 [442977, 1033912]
processed_samples 6200 unjoint_samples 6200 joint_samples 17 [1035130, 875279]
processed_samples 6200 unjoint_samples 6200 joint_samples 18 [1046304, 151333]
processed_samples 6200 unjoint_samples 6200 joint_samples 18 [503120, 1046544]
processed_samples 6200 unjoint_samples 6200 joint_samples 18 [307421, 978129]
processed_samples 6200 unjoint_samples 6200 joint_samples 18 [1046346, 1037146]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 6300 unjoint_samples 6300 joint_samples 18 [119654, 1046357]
processed_samples 6300 unjoint_samples 6300 joint_samples 19 [1046346, 402839]
processed_samples 6300 unjoint_samples 6300 joint_samples 18 [688487, 1033912]
processed_samples 6300 unjoint_samples 6300 joint_samples 17 [858026, 1044368]
processed_samples 6300 unjoint_samples 6300 joint_samples 18 [1046304, 627347]
processed_samples 6300 unjoint_samples 6300 joint_samples 18 [778633, 1046544]
processed_samples 6300 unjoint_samples 6300 joint_samples 18 [948766, 1010450]
processed_samples 6300 unjoint_samples 6300 joint_samples 18 [569966, 978129]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 6400 unjoint_samples 6400 joint_samples 18 [1036471, 137936]
processed_samples 6400 unjoint_samples 6400 joint_samples 18 [531651, 1046357]
processed_samples 6400 unjoint_samples 6400 joint_samples 19 [22605, 1046544]
processed_samples 6400 unjoint_samples 6400 joint_samples 19 [1014581, 262976]
processed_samples 6400 unjoint_samples 6400 joint_samples 19 [1046346, 802317]
processed_samples 6400 unjoint_samples 6400 joint_samples 18 [857877, 978129]
processed_samples 6400 unjoint_samples 6400 joint_samples 18 [1046304, 1035953]
processed_samples 6400 unjoint_samples 6400 joint_samples 18 [956285, 1033912]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 6500 unjoint_samples 6500 joint_samples 20 [1046346, 40521]
processed_samples 6500 unjoint_samples 6500 joint_samples 19 [1019805, 66382]
processed_samples 6500 unjoint_samples 6500 joint_samples 19 [1047318, 178252]
processed_samples 6500 unjoint_samples 6500 joint_samples 18 [1036471, 495175]
processed_samples 6500 unjoint_samples 6500 joint_samples 19 [310161, 1044001]
processed_samples 6500 unjoint_samples 6500 joint_samples 19 [1014581, 575214]
processed_samples 6500 unjoint_samples 6500 joint_samples 19 [368157, 1046544]
processed_samples 6500 unjoint_samples 6500 joint_samples 18 [920883, 1046357]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 6600 unjoint_samples 6600 joint_samples 19 [159685, 1046357]
processed_samples 6600 unjoint_samples 6600 joint_samples 18 [1036471, 794533]
processed_samples 6600 unjoint_samples 6600 joint_samples 19 [1019805, 370180]
processed_samples 6600 unjoint_samples 6600 joint_samples 20 [1046346, 349612]
processed_samples 6600 unjoint_samples 6600 joint_samples 19 [586255, 1044001]
processed_samples 6600 unjoint_samples 6600 joint_samples 19 [1014581, 878036]
processed_samples 6600 unjoint_samples 6600 joint_samples 19 [669615, 1046544]
processed_samples 6600 unjoint_samples 6600 joint_samples 19 [1047318, 475732]
[h264] mmco: unref short failure (repeated across many decoder instances)
local_disk/cognitron_vl/cognitron_vl/data/processor/image_processor.py:265: ResourceWarning: unclosed file <_io.BufferedReader name='/data/data/LMM/Vision-Flan/vision-flan_191-task_1k/images_191task_1k/WIT+detailed_description_597_Greene_Baptist_Church%2C_Greene_ME.jpg'>
  image = Image.open(img_or_path).convert("RGB")
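The ResourceWarning above comes from image_processor.py:265, where Image.open(img_or_path) leaves the underlying file handle open: Pillow opens files lazily and only releases the handle when the image is explicitly closed or garbage-collected. A minimal sketch of the usual fix, assuming img_or_path can be either a filesystem path or an already-loaded PIL image (which the variable name suggests but the log does not confirm), is to read the pixels inside a context manager so the handle is closed deterministically:

# Minimal sketch of avoiding the "unclosed file" ResourceWarning from PIL.
# load_rgb_image and the path-or-image convention are assumptions made for
# illustration; only the Image.open(...).convert("RGB") call comes from the log.
from PIL import Image

def load_rgb_image(img_or_path):
    if isinstance(img_or_path, Image.Image):
        return img_or_path.convert("RGB")
    with Image.open(img_or_path) as img:
        # convert() forces the pixel data to be read, so the file handle can be
        # released as soon as the with-block exits.
        return img.convert("RGB")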
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 6700 unjoint_samples 6700 joint_samples 20 [143790, 1036824]
processed_samples 6700 unjoint_samples 6700 joint_samples 19 [110411, 1026369]
processed_samples 6700 unjoint_samples 6700 joint_samples 19 [474109, 1046357]
processed_samples 6700 unjoint_samples 6700 joint_samples 19 [1019805, 752976]
processed_samples 6700 unjoint_samples 6700 joint_samples 20 [1046346, 679239]
processed_samples 6700 unjoint_samples 6700 joint_samples 19 [1047318, 809227]
processed_samples 6700 unjoint_samples 6700 joint_samples 20 [13207, 1046544]
processed_samples 6700 unjoint_samples 6700 joint_samples 19 [851851, 1044001]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 6800 unjoint_samples 6800 joint_samples 19 [463821, 1026369]
processed_samples 6800 unjoint_samples 6800 joint_samples 20 [186476, 1019148]
processed_samples 6800 unjoint_samples 6800 joint_samples 20 [439985, 1036824]
processed_samples 6800 unjoint_samples 6800 joint_samples 19 [776275, 1046357]
processed_samples 6800 unjoint_samples 6800 joint_samples 20 [997042, 218818]
processed_samples 6800 unjoint_samples 6800 joint_samples 20 [1046346, 947067]
processed_samples 6800 unjoint_samples 6800 joint_samples 20 [515507, 1046544]
processed_samples 6800 unjoint_samples 6800 joint_samples 19 [1046270, 1047112]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 6900 unjoint_samples 6900 joint_samples 20 [71885, 1046357]
processed_samples 6900 unjoint_samples 6900 joint_samples 20 [347036, 1047112]
processed_samples 6900 unjoint_samples 6900 joint_samples 21 [173230, 1047292]
processed_samples 6900 unjoint_samples 6900 joint_samples 19 [732422, 1026369]
processed_samples 6900 unjoint_samples 6900 joint_samples 20 [997042, 485989]
processed_samples 6900 unjoint_samples 6900 joint_samples 20 [510561, 1019148]
processed_samples 6900 unjoint_samples 6900 joint_samples 20 [791656, 1036824]
processed_samples 6900 unjoint_samples 6900 joint_samples 20 [850974, 1046544]
[h264] mmco: unref short failure (repeated across many decoder instances)
processed_samples 7000 unjoint_samples 7000 joint_samples 20 [372683, 1046357]
processed_samples 7000 unjoint_samples 7000 joint_samples 21 [990840, 235754]
processed_samples 7000 unjoint_samples 7000 joint_samples 20 [719713, 1019148]
processed_samples 7000 unjoint_samples 7000 joint_samples 20 [997042, 793210]
processed_samples 7000 unjoint_samples 7000 joint_samples 20 [702739, 1047112]
processed_samples 7000 unjoint_samples 7000 joint_samples 21 [491314, 1047292]
processed_samples 7000 unjoint_samples 7000 joint_samples 20 [1042431, 1042196]
processed_samples 7000 unjoint_samples 7000 joint_samples 19 [1029552, 1040028]
[h264] mmco: unref short failure (repeated across many decoder instances)
failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55e9d446c1c0] mmco: unref short failure [h264 @ 0x55e9d446c1c0] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55e9d446c1c0] mmco: unref short failure [h264 @ 0x55e9d446c1c0] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure processed_samples 7100 unjoint_samples 7100 joint_samples 20 [1037934, 288077] processed_samples 7100 unjoint_samples 7100 joint_samples 20 [1037934, 288077] processed_samples 7100 unjoint_samples 7100 joint_samples 21 [1045208, 330069] processed_samples 7100 unjoint_samples 7100 joint_samples 21 [1045208, 330069] processed_samples 7100 unjoint_samples 7100 joint_samples 20 [705936, 1046357] 
processed_samples 7100 unjoint_samples 7100 joint_samples 20 [705936, 1046357] processed_samples 7100 unjoint_samples 7100 joint_samples 20 [974526, 1019148] processed_samples 7100 unjoint_samples 7100 joint_samples 21 [990840, 477828] processed_samples 7100 unjoint_samples 7100 joint_samples 21 [990840, 477828] processed_samples 7100 unjoint_samples 7100 joint_samples 20 [974526, 1019148] processed_samples 7100 unjoint_samples 7100 joint_samples 21 [748338, 1047292] processed_samples 7100 unjoint_samples 7100 joint_samples 21 [748338, 1047292] processed_samples 7100 unjoint_samples 7100 joint_samples 20 [984075, 1047112] processed_samples 7100 unjoint_samples 7100 joint_samples 20 [984075, 1047112] processed_samples 7100 unjoint_samples 7100 joint_samples 20 [1015824, 1015543] processed_samples 7100 unjoint_samples 7100 joint_samples 20 [1015824, 1015543] [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d2389280] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref 
short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure processed_samples 7200 unjoint_samples 7200 joint_samples 21 [1046187, 279305] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [1046187, 279305] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [225814, 1026286] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [292159, 1045700] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [225814, 1026286] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [292159, 1045700] processed_samples 7200 unjoint_samples 7200 joint_samples 20 [1037934, 544231] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [1045208, 660543] processed_samples 7200 unjoint_samples 7200 joint_samples 20 [1037934, 544231] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [990840, 773299] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [1045208, 660543] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [990840, 773299] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [997397, 1047292] processed_samples 7200 unjoint_samples 7200 joint_samples 20 [973647, 1046357] processed_samples 7200 unjoint_samples 7200 joint_samples 20 [973647, 1046357] processed_samples 7200 unjoint_samples 7200 joint_samples 21 [997397, 1047292] [h264 @ 0x55e9d30bc340] mmco: unref short failure [h264 @ 0x55e9d30bc340] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] 
mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3aa8fcd80] mmco: unref short failure [h264 @ 0x55f3aa8fcd80] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9c65f7380] mmco: unref short failure [h264 @ 0x55e9c65f7380] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d09bb780] mmco: unref short failure [h264 @ 0x55f3aa792140] mmco: unref short failure [h264 @ 0x55e9d09bb780] mmco: unref short failure [h264 @ 0x55f3aa792140] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure processed_samples 7300 unjoint_samples 7300 joint_samples 21 [1044844, 207195] processed_samples 7300 unjoint_samples 7300 joint_samples 21 [1044844, 207195] processed_samples 7300 unjoint_samples 7300 joint_samples 21 [557713, 1026286] processed_samples 7300 unjoint_samples 7300 joint_samples 21 [557713, 1026286] processed_samples 7300 unjoint_samples 7300 joint_samples 22 [94147, 1005435] processed_samples 7300 unjoint_samples 7300 joint_samples 22 [94147, 1005435] processed_samples 7300 unjoint_samples 7300 
joint_samples 20 [1037934, 865390] processed_samples 7300 unjoint_samples 7300 joint_samples 20 [1037934, 865390] processed_samples 7300 unjoint_samples 7300 joint_samples 22 [1026401, 278133] processed_samples 7300 unjoint_samples 7300 joint_samples 22 [1026401, 278133] processed_samples 7300 unjoint_samples 7300 joint_samples 21 [1046187, 616801] processed_samples 7300 unjoint_samples 7300 joint_samples 21 [701746, 1045700] processed_samples 7300 unjoint_samples 7300 joint_samples 21 [701746, 1045700] processed_samples 7300 unjoint_samples 7300 joint_samples 21 [1046187, 616801] processed_samples 7300 unjoint_samples 7300 joint_samples 21 [1045208, 890955] processed_samples 7300 unjoint_samples 7300 joint_samples 21 [1045208, 890955] [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 
0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d446c1c0] mmco: unref short failure [h264 @ 0x55e9d446c1c0] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3aa8fcd80] mmco: unref short failure [h264 @ 0x55f3aa8fcd80] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure processed_samples 7400 unjoint_samples 7400 joint_samples 21 [111200, 1046913] processed_samples 7400 unjoint_samples 7400 joint_samples 21 [111200, 1046913] [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure processed_samples 7400 unjoint_samples 7400 joint_samples 22 [112816, 1045613] processed_samples 7400 unjoint_samples 7400 joint_samples 22 [112816, 1045613] processed_samples 7400 unjoint_samples 7400 joint_samples 21 [1046187, 909465] processed_samples 7400 unjoint_samples 7400 joint_samples 21 [1046187, 909465] processed_samples 7400 unjoint_samples 7400 joint_samples 22 [1026401, 651170] processed_samples 7400 unjoint_samples 7400 joint_samples 22 [1026401, 651170] processed_samples 7400 unjoint_samples 7400 joint_samples 21 [1044844, 515514] processed_samples 7400 unjoint_samples 7400 joint_samples 21 [1044844, 515514] processed_samples 7400 unjoint_samples 7400 joint_samples 22 [445772, 1005435] processed_samples 7400 unjoint_samples 7400 joint_samples 22 [445772, 1005435] processed_samples 7400 unjoint_samples 7400 joint_samples 21 [894531, 1026286] processed_samples 7400 unjoint_samples 7400 joint_samples 21 [894531, 1026286] [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure processed_samples 7400 unjoint_samples 7400 joint_samples 21 [1046566, 1047803] processed_samples 7400 unjoint_samples 7400 joint_samples 21 [1046566, 1047803] [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short 
failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3a8bed980] mmco: unref short failure [h264 @ 0x55f3a8bed980] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short 
failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55f3a73fec00] mmco: unref short failure processed_samples 7500 unjoint_samples 7500 joint_samples 21 [459262, 1046913] processed_samples 7500 unjoint_samples 7500 joint_samples 21 [459262, 1046913] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [1043463, 119643] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [1043463, 119643] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [116026, 1046508] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [116026, 1046508] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [1046566, 365875] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [1046566, 365875] [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure processed_samples 7500 unjoint_samples 7500 joint_samples 21 [1044844, 775882] processed_samples 7500 unjoint_samples 7500 joint_samples 21 [1044844, 775882] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [524565, 1045613] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [524565, 1045613] [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure processed_samples 7500 unjoint_samples 7500 joint_samples 22 [1030935, 1030914] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [1030935, 1030914] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [728271, 1005435] processed_samples 7500 unjoint_samples 7500 joint_samples 22 [728271, 1005435] [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3aa8fcd80] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a6cbdb00] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: 
unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d794ecc0] mmco: unref short failure [h264 @ 0x55e9d7323280] mmco: unref short failure [h264 @ 0x55f3aa792140] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure processed_samples 7600 unjoint_samples 7600 joint_samples 22 [373238, 1046508] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [894419, 1045613] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [373238, 1046508] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [894419, 1045613] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [64289, 1021554] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [64289, 1021554] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [1043463, 475041] processed_samples 7600 unjoint_samples 7600 joint_samples 23 [1048104, 200748] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [1043463, 475041] processed_samples 7600 unjoint_samples 7600 joint_samples 23 [1048104, 200748] processed_samples 7600 unjoint_samples 7600 joint_samples 21 [802540, 1046913] processed_samples 7600 unjoint_samples 7600 joint_samples 21 [802540, 1046913] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [1046566, 609523] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [1046566, 609523] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [1029521, 1031843] processed_samples 7600 unjoint_samples 7600 joint_samples 22 [1029521, 1031843] [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 
0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d2444ac0] mmco: unref short failure [h264 @ 0x55e9d2444ac0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3a7593200] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a6760500] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure processed_samples 7700 unjoint_samples 7700 joint_samples 22 [354451, 1021554] processed_samples 7700 unjoint_samples 7700 joint_samples 22 [769296, 1046508] processed_samples 7700 unjoint_samples 7700 joint_samples 22 [354451, 1021554] processed_samples 7700 unjoint_samples 7700 joint_samples 23 [1024281, 217731] processed_samples 7700 unjoint_samples 7700 joint_samples 23 [461613, 1047577] processed_samples 7700 unjoint_samples 7700 joint_samples 22 [769296, 1046508] processed_samples 7700 unjoint_samples 7700 joint_samples 23 [1024281, 217731] processed_samples 7700 unjoint_samples 7700 joint_samples 23 [1048104, 501844] processed_samples 7700 unjoint_samples 7700 joint_samples 22 [1043463, 804186] processed_samples 7700 unjoint_samples 7700 joint_samples 23 [461613, 1047577] processed_samples 7700 unjoint_samples 7700 joint_samples 23 [1048104, 501844] processed_samples 7700 unjoint_samples 7700 joint_samples 22 [1043463, 804186] 
processed_samples 7700 unjoint_samples 7700 joint_samples 21 [1002871, 1046913] processed_samples 7700 unjoint_samples 7700 joint_samples 22 [1046566, 887323] processed_samples 7700 unjoint_samples 7700 joint_samples 21 [1002871, 1046913] processed_samples 7700 unjoint_samples 7700 joint_samples 22 [1046566, 887323] [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d43cde40] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3aaff3800] mmco: unref short failure [h264 @ 0x55f3aaff3800] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref 
short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure processed_samples 7800 unjoint_samples 7800 joint_samples 22 [251884, 1046913] processed_samples 7800 unjoint_samples 7800 joint_samples 22 [251884, 1046913] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [8286, 1046825] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [8286, 1046825] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [1047666, 63386] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [1047666, 63386] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [1046566, 93965] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [1046566, 93965] processed_samples 7800 unjoint_samples 7800 joint_samples 22 [625631, 1021554] processed_samples 7800 unjoint_samples 7800 joint_samples 22 [625631, 1021554] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [1024281, 568327] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [1024281, 568327] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [1048104, 864693] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [1048104, 864693] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [951138, 1047577] processed_samples 7800 unjoint_samples 7800 joint_samples 23 [951138, 1047577] [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a679f200] mmco: unref short failure [h264 @ 0x55f3a679f200] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: 
unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure processed_samples 7900 unjoint_samples 7900 joint_samples 23 [1024281, 910159] processed_samples 7900 unjoint_samples 7900 joint_samples 24 [153269, 1023540] processed_samples 7900 unjoint_samples 7900 joint_samples 23 [1024281, 910159] processed_samples 7900 unjoint_samples 7900 joint_samples 23 [328196, 1046825] processed_samples 7900 unjoint_samples 7900 joint_samples 24 [218557, 1047577] processed_samples 7900 unjoint_samples 7900 joint_samples 24 [153269, 1023540] processed_samples 7900 unjoint_samples 7900 joint_samples 23 [1046566, 496529] processed_samples 7900 unjoint_samples 7900 joint_samples 23 [328196, 1046825] processed_samples 7900 unjoint_samples 7900 joint_samples 24 [218557, 1047577] processed_samples 7900 unjoint_samples 7900 joint_samples 23 [1046566, 496529] processed_samples 7900 unjoint_samples 7900 joint_samples 22 [716865, 1046913] processed_samples 7900 unjoint_samples 7900 joint_samples 22 [716865, 1046913] processed_samples 7900 unjoint_samples 7900 joint_samples 22 [1005463, 1021554] processed_samples 7900 unjoint_samples 7900 joint_samples 22 [1005463, 1021554] processed_samples 7900 unjoint_samples 7900 joint_samples 23 [1047666, 322105] processed_samples 7900 unjoint_samples 7900 joint_samples 23 [1047666, 322105] [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 
0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 
0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d34cfd80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure processed_samples 8000 unjoint_samples 8000 joint_samples 23 [69661, 1046913] processed_samples 8000 unjoint_samples 8000 joint_samples 23 [69661, 1046913] processed_samples 8000 unjoint_samples 8000 joint_samples 23 [215211, 1046448] processed_samples 8000 unjoint_samples 8000 joint_samples 23 [215211, 1046448] processed_samples 8000 unjoint_samples 8000 joint_samples 24 [305890, 1022611] processed_samples 8000 unjoint_samples 8000 joint_samples 24 [305890, 1022611] processed_samples 8000 unjoint_samples 8000 joint_samples 23 [1047666, 671272] processed_samples 8000 unjoint_samples 8000 joint_samples 23 [1047666, 671272] processed_samples 8000 unjoint_samples 8000 joint_samples 24 [535073, 1047577] processed_samples 8000 unjoint_samples 8000 joint_samples 24 [535073, 1047577] processed_samples 8000 unjoint_samples 8000 joint_samples 24 [609175, 1023540] processed_samples 8000 unjoint_samples 8000 joint_samples 23 [581764, 1046825] processed_samples 8000 unjoint_samples 8000 joint_samples 23 
[581764, 1046825] processed_samples 8000 unjoint_samples 8000 joint_samples 24 [609175, 1023540] [h264 @ 0x55f3aa201540] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure processed_samples 8000 unjoint_samples 8000 joint_samples 23 [1046566, 812049] processed_samples 8000 unjoint_samples 8000 joint_samples 23 [1046566, 812049] [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d1152040] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55e9d43cde40] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure 
processed_samples 8100 unjoint_samples 8100 joint_samples 23 [387899, 1046913]
processed_samples 8100 unjoint_samples 8100 joint_samples 23 [496611, 1046448]
processed_samples 8100 unjoint_samples 8100 joint_samples 24 [93559, 1046261]
processed_samples 8100 unjoint_samples 8100 joint_samples 23 [889758, 1046825]
processed_samples 8100 unjoint_samples 8100 joint_samples 24 [1005345, 1047577]
processed_samples 8100 unjoint_samples 8100 joint_samples 24 [566306, 1022611]
processed_samples 8100 unjoint_samples 8100 joint_samples 24 [18259, 1043086]
processed_samples 8100 unjoint_samples 8100 joint_samples 24 [912248, 1023540]
[h264] mmco: unref short failure (repeated)
processed_samples 8200 unjoint_samples 8200 joint_samples 24 [310786, 1043086]
processed_samples 8200 unjoint_samples 8200 joint_samples 23 [757170, 1046913]
processed_samples 8200 unjoint_samples 8200 joint_samples 24 [1021411, 146996]
processed_samples 8200 unjoint_samples 8200 joint_samples 25 [1008984, 363735]
processed_samples 8200 unjoint_samples 8200 joint_samples 24 [364759, 1046261]
processed_samples 8200 unjoint_samples 8200 joint_samples 24 [874267, 1022611]
processed_samples 8200 unjoint_samples 8200 joint_samples 25 [253927, 1047618]
processed_samples 8200 unjoint_samples 8200 joint_samples 23 [743291, 1046448]
[h264] mmco: unref short failure (repeated)
processed_samples 8300 unjoint_samples 8300 joint_samples 24 [1021411, 368081]
processed_samples 8300 unjoint_samples 8300 joint_samples 25 [572585, 1047618]
processed_samples 8300 unjoint_samples 8300 joint_samples 24 [627790, 1043086]
processed_samples 8300 unjoint_samples 8300 joint_samples 24 [30765, 1046913]
processed_samples 8300 unjoint_samples 8300 joint_samples 25 [229825, 1031239]
processed_samples 8300 unjoint_samples 8300 joint_samples 25 [1008984, 639493]
processed_samples 8300 unjoint_samples 8300 joint_samples 24 [744025, 1046261]
processed_samples 8300 unjoint_samples 8300 joint_samples 23 [1015136, 1046448]
[h264] mmco: unref short failure (repeated)
processed_samples 8400 unjoint_samples 8400 joint_samples 24 [244578, 1046448]
processed_samples 8400 unjoint_samples 8400 joint_samples 25 [1048313, 10340]
processed_samples 8400 unjoint_samples 8400 joint_samples 25 [564614, 1031239]
processed_samples 8400 unjoint_samples 8400 joint_samples 24 [454331, 1046913]
processed_samples 8400 unjoint_samples 8400 joint_samples 24 [1021411, 633403]
processed_samples 8400 unjoint_samples 8400 joint_samples 24 [1020027, 1043086]
processed_samples 8400 unjoint_samples 8400 joint_samples 25 [1008984, 954531]
processed_samples 8400 unjoint_samples 8400 joint_samples 25 [891569, 1047618]
[h264] mmco: unref short failure (repeated)
[h264] Missing reference picture, default is 65530 (repeated)
processed_samples 8500 unjoint_samples 8500 joint_samples 24 [626356, 1046448]
processed_samples 8500 unjoint_samples 8500 joint_samples 25 [265056, 1043086]
processed_samples 8500 unjoint_samples 8500 joint_samples 26 [1047186, 110471]
processed_samples 8500 unjoint_samples 8500 joint_samples 24 [697968, 1046913]
processed_samples 8500 unjoint_samples 8500 joint_samples 25 [825911, 1031239]
processed_samples 8500 unjoint_samples 8500 joint_samples 26 [1046223, 198033]
processed_samples 8500 unjoint_samples 8500 joint_samples 25 [1048313, 363453]
processed_samples 8500 unjoint_samples 8500 joint_samples 24 [1021411, 929234]
[h264] mmco: unref short failure (repeated)
processed_samples 8600 unjoint_samples 8600 joint_samples 26 [1046815, 158950]
processed_samples 8600 unjoint_samples 8600 joint_samples 25 [1036869, 156374]
processed_samples 8600 unjoint_samples 8600 joint_samples 26 [1047186, 362844]
processed_samples 8600 unjoint_samples 8600 joint_samples 25 [590632, 1043086]
processed_samples 8600 unjoint_samples 8600 joint_samples 26 [1046223, 499882]
processed_samples 8600 unjoint_samples 8600 joint_samples 25 [1048313, 849682]
processed_samples 8600 unjoint_samples 8600 joint_samples 24 [937197, 1046448]
processed_samples 8600 unjoint_samples 8600 joint_samples 24 [977655, 1046913]
[h264] mmco: unref short failure (repeated)
processed_samples 8700 unjoint_samples 8700 joint_samples 25 [200905, 1046559]
processed_samples 8700 unjoint_samples 8700 joint_samples 25 [1027102, 438717]
processed_samples 8700 unjoint_samples 8700 joint_samples 26 [281736, 1046627]
processed_samples 8700 unjoint_samples 8700 joint_samples 25 [840828, 1043086]
processed_samples 8700 unjoint_samples 8700 joint_samples 25 [1036869, 508088]
processed_samples 8700 unjoint_samples 8700 joint_samples 26 [1046223, 860050]
processed_samples 8700 unjoint_samples 8700 joint_samples 26 [1047186, 661442]
processed_samples 8700 unjoint_samples 8700 joint_samples 26 [1046815, 605922]
[h264] mmco: unref short failure (repeated)
processed_samples 8800 unjoint_samples 8800 joint_samples 25 [1027102, 750978]
processed_samples 8800 unjoint_samples 8800 joint_samples 26 [1037486, 89055]
processed_samples 8800 unjoint_samples 8800 joint_samples 25 [448658, 1046559]
processed_samples 8800 unjoint_samples 8800 joint_samples 25 [1036869, 915776]
processed_samples 8800 unjoint_samples 8800 joint_samples 27 [97363, 1045274]
processed_samples 8800 unjoint_samples 8800 joint_samples 26 [1046815, 1046320]
processed_samples 8800 unjoint_samples 8800 joint_samples 26 [530059, 1046627]
processed_samples 8800 unjoint_samples 8800 joint_samples 26 [1047186, 916009]
[h264] mmco: unref short failure (repeated)
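The processed_samples / unjoint_samples / joint_samples records are the only progress signal in this stretch of the log, and each record is printed twice (assumed here to be duplicate prints from paired ranks). Below is a minimal sketch for pulling these records out of a saved node log and dropping the verbatim duplicates; the regex simply mirrors the line format above, parse_progress is an illustrative helper, and "node.log" is a placeholder file name.

import re
from collections import OrderedDict

# Matches records such as:
#   processed_samples 9300 unjoint_samples 9300 joint_samples 28 [598086, 1047549]
PROGRESS_RE = re.compile(
    r"processed_samples (\d+) unjoint_samples (\d+) "
    r"joint_samples (\d+) \[(\d+), (\d+)\]"
)

def parse_progress(log_text):
    # Keep the first occurrence of each identical record, in log order.
    seen = OrderedDict()
    for match in PROGRESS_RE.finditer(log_text):
        key = tuple(int(g) for g in match.groups())
        seen.setdefault(key, True)
    return list(seen)

if __name__ == "__main__":
    with open("node.log", encoding="utf-8") as fh:  # placeholder path
        for record in parse_progress(fh.read()):
            print(record)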
[h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure 
[h264 @ 0x55f3a667fe80] mmco: unref short failure processed_samples 8900 unjoint_samples 8900 joint_samples 27 [1047186, 202985] processed_samples 8900 unjoint_samples 8900 joint_samples 27 [1047186, 202985] processed_samples 8900 unjoint_samples 8900 joint_samples 26 [168069, 1038984] processed_samples 8900 unjoint_samples 8900 joint_samples 26 [168069, 1038984] processed_samples 8900 unjoint_samples 8900 joint_samples 26 [1037486, 390010] processed_samples 8900 unjoint_samples 8900 joint_samples 26 [1037486, 390010] processed_samples 8900 unjoint_samples 8900 joint_samples 26 [66744, 1027910] processed_samples 8900 unjoint_samples 8900 joint_samples 26 [66744, 1027910] processed_samples 8900 unjoint_samples 8900 joint_samples 27 [336113, 1046320] processed_samples 8900 unjoint_samples 8900 joint_samples 27 [336113, 1046320] processed_samples 8900 unjoint_samples 8900 joint_samples 25 [690238, 1046559] processed_samples 8900 unjoint_samples 8900 joint_samples 25 [690238, 1046559] processed_samples 8900 unjoint_samples 8900 joint_samples 27 [399228, 1045274] processed_samples 8900 unjoint_samples 8900 joint_samples 27 [399228, 1045274] [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure processed_samples 8900 unjoint_samples 8900 joint_samples 26 [814685, 1046627] processed_samples 8900 unjoint_samples 8900 joint_samples 26 [814685, 1046627] [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d32c2340] mmco: unref short failure [h264 @ 0x55e9d32c2340] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short 
failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d1152040] mmco: unref short failure [h264 @ 0x55e9d1152040] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure processed_samples 9000 unjoint_samples 9000 joint_samples 26 [497593, 1038984] processed_samples 9000 unjoint_samples 9000 joint_samples 26 [497593, 1038984] processed_samples 9000 unjoint_samples 9000 joint_samples 26 [407608, 1027910] processed_samples 9000 unjoint_samples 9000 joint_samples 26 [407608, 1027910] processed_samples 9000 unjoint_samples 9000 joint_samples 27 [1047186, 482679] processed_samples 9000 unjoint_samples 9000 joint_samples 27 [1047186, 482679] processed_samples 9000 unjoint_samples 9000 joint_samples 27 [1046330, 13562] processed_samples 9000 unjoint_samples 9000 joint_samples 27 [1046330, 13562] processed_samples 9000 unjoint_samples 9000 joint_samples 27 [695344, 1045274] processed_samples 9000 unjoint_samples 9000 joint_samples 26 [1037486, 742119] 
processed_samples 9000 unjoint_samples 9000 joint_samples 26 [1037486, 742119] processed_samples 9000 unjoint_samples 9000 joint_samples 27 [695344, 1045274] processed_samples 9000 unjoint_samples 9000 joint_samples 27 [614708, 1046320] processed_samples 9000 unjoint_samples 9000 joint_samples 27 [614708, 1046320] processed_samples 9000 unjoint_samples 9000 joint_samples 25 [1022721, 1046559] processed_samples 9000 unjoint_samples 9000 joint_samples 25 [1022721, 1046559] [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] 
mmco: unref short failure    (FFmpeg h264 decoder warning; this message repeats many times across decoder instances throughout the log, duplicate lines collapsed below)
processed_samples 9100 unjoint_samples 9100 joint_samples 26 [343769, 1046559]
processed_samples 9100 unjoint_samples 9100 joint_samples 27 [19540, 1046217]
processed_samples 9100 unjoint_samples 9100 joint_samples 27 [1046330, 330840]
processed_samples 9100 unjoint_samples 9100 joint_samples 26 [633548, 1027910]
processed_samples 9100 unjoint_samples 9100 joint_samples 27 [845628, 1046320]
processed_samples 9100 unjoint_samples 9100 joint_samples 26 [763457, 1038984]
processed_samples 9100 unjoint_samples 9100 joint_samples 27 [1028881, 1045274]
processed_samples 9100 unjoint_samples 9100 joint_samples 27 [1047186, 843542]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
processed_samples 9200 unjoint_samples 9200 joint_samples 26 [823309, 1046559]
processed_samples 9200 unjoint_samples 9200 joint_samples 28 [270744, 1047549]
processed_samples 9200 unjoint_samples 9200 joint_samples 27 [1042096, 138833]
processed_samples 9200 unjoint_samples 9200 joint_samples 28 [1047720, 296982]
processed_samples 9200 unjoint_samples 9200 joint_samples 27 [365292, 1046217]
processed_samples 9200 unjoint_samples 9200 joint_samples 28 [309206, 1047662]
processed_samples 9200 unjoint_samples 9200 joint_samples 27 [1046330, 648155]
processed_samples 9200 unjoint_samples 9200 joint_samples 26 [955844, 1027910]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
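The progress lines above all follow the same pattern, `processed_samples <n> unjoint_samples <n> joint_samples <k> [<a>, <b>]`, and each distinct entry appears twice in the raw output. A minimal sketch of how such lines could be tallied from a log like this one; the regex, the placeholder file name `train.log`, and the aggregation are illustrative assumptions, not part of the training code:

```python
import re
from collections import defaultdict

# Hypothetical helper: count distinct joint_samples entries per
# processed_samples milestone in a log shaped like the one above.
LINE_RE = re.compile(
    r"processed_samples (\d+) unjoint_samples (\d+) "
    r"joint_samples (\d+) \[(\d+), (\d+)\]"
)

def tally(log_path="train.log"):  # placeholder path, not the real log name
    per_milestone = defaultdict(set)
    with open(log_path, encoding="utf-8", errors="replace") as f:
        for line in f:
            # several entries may be packed onto one physical line
            for m in LINE_RE.finditer(line):
                processed, _, joint, a, b = m.groups()
                per_milestone[int(processed)].add((int(joint), int(a), int(b)))
    for milestone in sorted(per_milestone):
        print(milestone, len(per_milestone[milestone]), "distinct entries")

if __name__ == "__main__":
    tally()
```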
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
processed_samples 9300 unjoint_samples 9300 joint_samples 27 [25340, 1046842]
processed_samples 9300 unjoint_samples 9300 joint_samples 28 [598086, 1047549]
processed_samples 9300 unjoint_samples 9300 joint_samples 27 [187490, 1047199]
processed_samples 9300 unjoint_samples 9300 joint_samples 27 [1042096, 430856]
processed_samples 9300 unjoint_samples 9300 joint_samples 27 [653736, 1046217]
processed_samples 9300 unjoint_samples 9300 joint_samples 28 [711558, 1047662]
processed_samples 9300 unjoint_samples 9300 joint_samples 27 [1046330, 916429]
processed_samples 9300 unjoint_samples 9300 joint_samples 28 [1047720, 628027]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
processed_samples 9400 unjoint_samples 9400 joint_samples 27 [346786, 1046842]
processed_samples 9400 unjoint_samples 9400 joint_samples 27 [421062, 1047199]
processed_samples 9400 unjoint_samples 9400 joint_samples 28 [1047720, 886529]
processed_samples 9400 unjoint_samples 9400 joint_samples 28 [212077, 1046402]
processed_samples 9400 unjoint_samples 9400 joint_samples 27 [1042096, 729803]
processed_samples 9400 unjoint_samples 9400 joint_samples 28 [842333, 1047549]
processed_samples 9400 unjoint_samples 9400 joint_samples 28 [1013012, 1047662]
processed_samples 9400 unjoint_samples 9400 joint_samples 27 [946453, 1046217]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
processed_samples 9500 unjoint_samples 9500 joint_samples 29 [285806, 1047549]
processed_samples 9500 unjoint_samples 9500 joint_samples 29 [1047720, 153189]
processed_samples 9500 unjoint_samples 9500 joint_samples 28 [111281, 1021953]
processed_samples 9500 unjoint_samples 9500 joint_samples 29 [1036975, 294026]
processed_samples 9500 unjoint_samples 9500 joint_samples 28 [496473, 1046402]
processed_samples 9500 unjoint_samples 9500 joint_samples 27 [739811, 1046842]
processed_samples 9500 unjoint_samples 9500 joint_samples 28 [177504, 1047801]
processed_samples 9500 unjoint_samples 9500 joint_samples 27 [744681, 1047199]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
processed_samples 9600 unjoint_samples 9600 joint_samples 28 [1046373, 37424]
processed_samples 9600 unjoint_samples 9600 joint_samples 28 [349944, 1021953]
processed_samples 9600 unjoint_samples 9600 joint_samples 29 [1047720, 453377]
processed_samples 9600 unjoint_samples 9600 joint_samples 28 [471081, 1047801]
processed_samples 9600 unjoint_samples 9600 joint_samples 29 [568437, 1047549]
processed_samples 9600 unjoint_samples 9600 joint_samples 29 [1036975, 735934]
processed_samples 9600 unjoint_samples 9600 joint_samples 28 [938785, 1046402]
processed_samples 9600 unjoint_samples 9600 joint_samples 27 [1011692, 1047199]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
processed_samples 9700 unjoint_samples 9700 joint_samples 28 [226122, 1047199]
processed_samples 9700 unjoint_samples 9700 joint_samples 28 [770119, 1047801]
processed_samples 9700 unjoint_samples 9700 joint_samples 28 [1046373, 380607]
processed_samples 9700 unjoint_samples 9700 joint_samples 29 [197337, 1046402]
processed_samples 9700 unjoint_samples 9700 joint_samples 28 [756827, 1021953]
processed_samples 9700 unjoint_samples 9700 joint_samples 29 [989480, 1047549]
processed_samples 9700 unjoint_samples 9700 joint_samples 29 [1047720, 693274]
processed_samples 9700 unjoint_samples 9700 joint_samples 29 [1036975, 949591]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
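The `mmco: unref short failure` messages come from FFmpeg's H.264 decoder and are non-fatal; decoding and training continue past them, as the progress counters above show. If the video frames here are decoded in-process through PyAV (an assumption; the actual decode path is not visible in this log), a sketch of how such libavcodec chatter could be kept out of the training log is below. The `av.logging.set_level` call and `ERROR` constant are PyAV names as documented in recent releases, and `first_frame` is an illustrative helper, not part of this pipeline:

```python
import av          # PyAV; whether this pipeline uses it is an assumption
import av.logging

# Raise the libavcodec log threshold so warnings such as
# "mmco: unref short failure" are suppressed, keeping only errors.
av.logging.set_level(av.logging.ERROR)

def first_frame(path):
    """Decode and return the first video frame of `path` (illustrative only)."""
    container = av.open(path)
    try:
        for frame in container.decode(video=0):
            return frame
        return None
    finally:
        container.close()
```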
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
processed_samples 9800 unjoint_samples 9800 joint_samples 30 [245755, 1038945]
processed_samples 9800 unjoint_samples 9800 joint_samples 30 [1045130, 245227]
processed_samples 9800 unjoint_samples 9800 joint_samples 28 [1046373, 695140]
processed_samples 9800 unjoint_samples 9800 joint_samples 28 [518918, 1047199]
processed_samples 9800 unjoint_samples 9800 joint_samples 29 [465415, 1046402]
processed_samples 9800 unjoint_samples 9800 joint_samples 28 [1020770, 1047801]
processed_samples 9800 unjoint_samples 9800 joint_samples 28 [1000220, 1021953]
processed_samples 9800 unjoint_samples 9800 joint_samples 29 [1047720, 994555]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
processed_samples 9900 unjoint_samples 9900 joint_samples 28 [859414, 1047199]
processed_samples 9900 unjoint_samples 9900 joint_samples 29 [1046553, 148228]
processed_samples 9900 unjoint_samples 9900 joint_samples 30 [335684, 1046698]
processed_samples 9900 unjoint_samples 9900 joint_samples 30 [589555, 1038945]
processed_samples 9900 unjoint_samples 9900 joint_samples 30 [1045130, 507887]
processed_samples 9900 unjoint_samples 9900 joint_samples 29 [780332, 1046402]
processed_samples 9900 unjoint_samples 9900 joint_samples 29 [1043335, 293031]
processed_samples 9900 unjoint_samples 9900 joint_samples 28 [1046373, 1000371]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
processed_samples 10000 unjoint_samples 10000 joint_samples 29 [1022668, 154461]
processed_samples 10000 unjoint_samples 10000 joint_samples 29 [1046373, 290085]
processed_samples 10000 unjoint_samples 10000 joint_samples 29 [1043335, 532885]
processed_samples 10000 unjoint_samples 10000 joint_samples 30 [926729, 1038945]
processed_samples 10000 unjoint_samples 10000 joint_samples 30 [1042833, 134708]
processed_samples 10000 unjoint_samples 10000 joint_samples 29 [1046553, 351217]
processed_samples 10000 unjoint_samples 10000 joint_samples 30 [682760, 1046698]
processed_samples 10000 unjoint_samples 10000 joint_samples 30 [1045130, 879890]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
processed_samples 10100 unjoint_samples 10100 joint_samples 29 [1022668, 476688]
processed_samples 10100 unjoint_samples 10100 joint_samples 29 [1046373, 591032]
processed_samples 10100 unjoint_samples 10100 joint_samples 30 [1042833, 369274]
processed_samples 10100 unjoint_samples 10100 joint_samples 31 [1047439, 107183]
processed_samples 10100 unjoint_samples 10100 joint_samples 29 [1046553, 609783]
processed_samples 10100 unjoint_samples 10100 joint_samples 29 [1043335, 910419]
processed_samples 10100 unjoint_samples 10100 joint_samples 31 [1048537, 191661]
processed_samples 10100 unjoint_samples 10100 joint_samples 30 [934593, 1046698]
[h264 @ 0x…] mmco: unref short failure    (repeated; collapsed)
failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure processed_samples 10200 unjoint_samples 10200 joint_samples 30 [1045636, 186063] processed_samples 10200 unjoint_samples 10200 joint_samples 30 [1045636, 186063] processed_samples 10200 unjoint_samples 10200 joint_samples 31 [1041074, 218126] processed_samples 10200 unjoint_samples 10200 joint_samples 31 [1041074, 218126] processed_samples 10200 unjoint_samples 10200 joint_samples 31 [1048537, 538490] processed_samples 10200 unjoint_samples 10200 joint_samples 31 [1048537, 538490] processed_samples 10200 unjoint_samples 10200 joint_samples 29 [1022668, 809790] processed_samples 10200 unjoint_samples 10200 joint_samples 29 [1022668, 809790] processed_samples 10200 unjoint_samples 10200 joint_samples 29 [1046373, 872598] processed_samples 10200 unjoint_samples 10200 joint_samples 29 [1046373, 872598] processed_samples 10200 unjoint_samples 10200 joint_samples 30 [1042833, 664986] processed_samples 10200 unjoint_samples 10200 joint_samples 30 [1042833, 664986] processed_samples 10200 unjoint_samples 10200 joint_samples 29 [1046553, 880320] processed_samples 10200 unjoint_samples 10200 joint_samples 29 [1046553, 880320] processed_samples 10200 unjoint_samples 10200 joint_samples 31 [1047439, 444395] processed_samples 10200 unjoint_samples 10200 joint_samples 31 [1047439, 444395] [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3ad5e0780] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3ad5e0780] mmco: unref short failure [h264 @ 0x55f3ad5e0780] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure 
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
processed_samples 10300 unjoint_samples 10300 joint_samples 30 [221750, 1036052]
processed_samples 10300 unjoint_samples 10300 joint_samples 30 [18443, 1047193]
processed_samples 10300 unjoint_samples 10300 joint_samples 31 [127466, 992417]
processed_samples 10300 unjoint_samples 10300 joint_samples 30 [1046553, 82694]
processed_samples 10300 unjoint_samples 10300 joint_samples 31 [1041074, 472595]
processed_samples 10300 unjoint_samples 10300 joint_samples 30 [1045636, 480561]
processed_samples 10300 unjoint_samples 10300 joint_samples 31 [1047439, 709875]
processed_samples 10300 unjoint_samples 10300 joint_samples 31 [1048537, 824836]
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
processed_samples 10400 unjoint_samples 10400 joint_samples 32 [1048537, 48836]
processed_samples 10400 unjoint_samples 10400 joint_samples 30 [311670, 1047193]
processed_samples 10400 unjoint_samples 10400 joint_samples 30 [599595, 1036052]
processed_samples 10400 unjoint_samples 10400 joint_samples 30 [1045636, 805290]
processed_samples 10400 unjoint_samples 10400 joint_samples 31 [440973, 992417]
processed_samples 10400 unjoint_samples 10400 joint_samples 30 [1046553, 351274]
processed_samples 10400 unjoint_samples 10400 joint_samples 31 [1041074, 805084]
processed_samples 10400 unjoint_samples 10400 joint_samples 31 [1047439, 953505]
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
processed_samples 10500 unjoint_samples 10500 joint_samples 31 [55050, 1029703]
processed_samples 10500 unjoint_samples 10500 joint_samples 32 [196031, 1046831]
processed_samples 10500 unjoint_samples 10500 joint_samples 32 [1048537, 323377]
processed_samples 10500 unjoint_samples 10500 joint_samples 32 [59308, 1046469]
processed_samples 10500 unjoint_samples 10500 joint_samples 30 [1046553, 647191]
processed_samples 10500 unjoint_samples 10500 joint_samples 30 [889058, 1036052]
processed_samples 10500 unjoint_samples 10500 joint_samples 30 [552947, 1047193]
processed_samples 10500 unjoint_samples 10500 joint_samples 31 [746995, 992417]
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
processed_samples 10600 unjoint_samples 10600 joint_samples 31 [1046379, 218203]
processed_samples 10600 unjoint_samples 10600 joint_samples 32 [408012, 1046469]
processed_samples 10600 unjoint_samples 10600 joint_samples 31 [392229, 1029703]
processed_samples 10600 unjoint_samples 10600 joint_samples 32 [1048537, 706085]
processed_samples 10600 unjoint_samples 10600 joint_samples 30 [1046553, 897273]
processed_samples 10600 unjoint_samples 10600 joint_samples 32 [511573, 1046831]
processed_samples 10600 unjoint_samples 10600 joint_samples 31 [1022165, 1021403]
processed_samples 10600 unjoint_samples 10600 joint_samples 30 [902688, 1047193]
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
processed_samples 10700 unjoint_samples 10700 joint_samples 31 [1045583, 134653]
processed_samples 10700 unjoint_samples 10700 joint_samples 31 [1046379, 650494]
processed_samples 10700 unjoint_samples 10700 joint_samples 31 [1046553, 161183]
processed_samples 10700 unjoint_samples 10700 joint_samples 31 [736974, 1029703]
processed_samples 10700 unjoint_samples 10700 joint_samples 32 [270339, 1046999]
processed_samples 10700 unjoint_samples 10700 joint_samples 32 [1048537, 1035954]
processed_samples 10700 unjoint_samples 10700 joint_samples 32 [762082, 1046469]
processed_samples 10700 unjoint_samples 10700 joint_samples 32 [773657, 1046831]
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
processed_samples 10800 unjoint_samples 10800 joint_samples 31 [1046379, 958215]
processed_samples 10800 unjoint_samples 10800 joint_samples 33 [99545, 1046831]
processed_samples 10800 unjoint_samples 10800 joint_samples 33 [1022975, 76246]
processed_samples 10800 unjoint_samples 10800 joint_samples 31 [1046553, 489755]
processed_samples 10800 unjoint_samples 10800 joint_samples 33 [289394, 1047094]
processed_samples 10800 unjoint_samples 10800 joint_samples 32 [523353, 1046999]
processed_samples 10800 unjoint_samples 10800 joint_samples 31 [1033401, 1035723]
processed_samples 10800 unjoint_samples 10800 joint_samples 31 [1045583, 711939]
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
processed_samples 10900 unjoint_samples 10900 joint_samples 32 [1038260, 386058]
processed_samples 10900 unjoint_samples 10900 joint_samples 32 [150278, 1042575]
processed_samples 10900 unjoint_samples 10900 joint_samples 32 [1046379, 249875]
processed_samples 10900 unjoint_samples 10900 joint_samples 33 [652053, 1047094]
processed_samples 10900 unjoint_samples 10900 joint_samples 33 [1022975, 455619]
processed_samples 10900 unjoint_samples 10900 joint_samples 33 [517900, 1046831]
processed_samples 10900 unjoint_samples 10900 joint_samples 32 [868569, 1046999]
processed_samples 10900 unjoint_samples 10900 joint_samples 31 [1046553, 776054]
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
[Errno 108] Cannot send after transport endpoint shutdown: '/data_2/output'
[Errno 108] Cannot send after transport endpoint shutdown: 'data_2'
[Errno 108] Cannot send after transport endpoint shutdown: '/data_2/output/LM'
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
processed_samples 11000 unjoint_samples 11000 joint_samples 32 [486754, 1042575]
processed_samples 11000 unjoint_samples 11000 joint_samples 32 [1046379, 530936]
processed_samples 11000 unjoint_samples 11000 joint_samples 33 [1034847, 1047094]
processed_samples 11000 unjoint_samples 11000 joint_samples 32 [1038260, 603082]
processed_samples 11000 unjoint_samples 11000 joint_samples 33 [131608, 1046999]
processed_samples 11000 unjoint_samples 11000 joint_samples 33 [1022975, 890250]
processed_samples 11000 unjoint_samples 11000 joint_samples 32 [160901, 1034771]
processed_samples 11000 unjoint_samples 11000 joint_samples 33 [825771, 1046831]
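The "[Errno 108] Cannot send after transport endpoint shutdown" burst above is errno ESHUTDOWN, raised for paths under /data_2/output, which suggests the client connection to the shared storage backing that mount was torn down while the job kept writing logs and checkpoints. A minimal probe sketch, assuming the output directory sits on such a mount; the helper name and probe file are hypothetical and not taken from this codebase.

import errno
import os

def output_mount_alive(path):
    """Try a tiny write under `path`; return False on ESHUTDOWN (errno 108),
    True on success, and re-raise any other error."""
    probe = os.path.join(path, ".write_probe")
    try:
        with open(probe, "w") as f:
            f.write("ok")
        os.remove(probe)
        return True
    except OSError as e:
        if e.errno == errno.ESHUTDOWN:   # [Errno 108], as seen in this log
            return False
        raise

if __name__ == "__main__":
    if not output_mount_alive("/data_2/output"):
        raise SystemExit("output mount unavailable; stop before losing checkpoints")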
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
processed_samples 11100 unjoint_samples 11100 joint_samples 34 [1046786, 243622]
processed_samples 11100 unjoint_samples 11100 joint_samples 34 [1044378, 144201]
processed_samples 11100 unjoint_samples 11100 joint_samples 32 [1046379, 805679]
processed_samples 11100 unjoint_samples 11100 joint_samples 34 [1046708, 73781]
processed_samples 11100 unjoint_samples 11100 joint_samples 32 [598745, 1034771]
processed_samples 11100 unjoint_samples 11100 joint_samples 33 [466674, 1046999]
processed_samples 11100 unjoint_samples 11100 joint_samples 32 [1038260, 866151]
processed_samples 11100 unjoint_samples 11100 joint_samples 32 [779163, 1042575]
[h264 @ 0x…] mmco: unref short failure (decoder warning repeated, multiple h264 contexts)
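The recurring "mmco: unref short failure" lines come from FFmpeg's H.264 decoder: a warning that a memory-management control operation could not unreference a short-term reference frame, usually caused by imperfect reference handling in the source video and generally not fatal to frame extraction. If the video pipeline decodes through PyAV (an assumption; the actual decode path is not visible in this log), the chatter can be reduced by raising FFmpeg's log threshold, as in this sketch.

# Assumes PyAV is the decode path, which this log does not confirm.
import av
import av.logging

# Keep ERROR and above; drop WARNING-level messages such as
# "mmco: unref short failure".
av.logging.set_level(av.logging.ERROR)

with av.open("sample.mp4") as container:      # hypothetical input file
    for frame in container.decode(video=0):
        pass  # decoding is unchanged; only the log verbosity differs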
[h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3aaf595c0] mmco: unref short failure [h264 @ 0x55f3aaf595c0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure processed_samples 11200 unjoint_samples 11200 joint_samples 33 [1046379, 48806] processed_samples 11200 unjoint_samples 11200 joint_samples 33 [1046379, 48806] processed_samples 11200 unjoint_samples 11200 joint_samples 33 [1048459, 237166] processed_samples 11200 unjoint_samples 11200 joint_samples 33 [1048459, 237166] processed_samples 11200 unjoint_samples 11200 joint_samples 34 [1046786, 546697] processed_samples 11200 unjoint_samples 11200 joint_samples 34 [1046786, 546697] processed_samples 11200 unjoint_samples 11200 joint_samples 34 [1046708, 359045] processed_samples 11200 unjoint_samples 11200 joint_samples 34 [1046708, 359045] processed_samples 11200 unjoint_samples 11200 joint_samples 32 [1022830, 1042575] processed_samples 11200 unjoint_samples 11200 joint_samples 32 [1022830, 1042575] processed_samples 11200 unjoint_samples 11200 joint_samples 32 [944368, 1034771] processed_samples 11200 unjoint_samples 11200 joint_samples 32 [944368, 1034771] processed_samples 11200 unjoint_samples 11200 joint_samples 34 [1044378, 425258] processed_samples 11200 unjoint_samples 11200 joint_samples 34 [1044378, 425258] processed_samples 11200 unjoint_samples 11200 joint_samples 33 [716455, 1046999] processed_samples 11200 unjoint_samples 11200 joint_samples 33 [716455, 1046999] [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 11300 unjoint_samples 11300 joint_samples 33 [1046379, 318076]
processed_samples 11300 unjoint_samples 11300 joint_samples 33 [1034722, 294948]
processed_samples 11300 unjoint_samples 11300 joint_samples 33 [1048459, 523719]
processed_samples 11300 unjoint_samples 11300 joint_samples 34 [1044378, 689354]
processed_samples 11300 unjoint_samples 11300 joint_samples 34 [986382, 161022]
processed_samples 11300 unjoint_samples 11300 joint_samples 34 [1046708, 638671]
processed_samples 11300 unjoint_samples 11300 joint_samples 33 [170173, 1046853]
processed_samples 11300 unjoint_samples 11300 joint_samples 34 [1046786, 851882]
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 11400 unjoint_samples 11400 joint_samples 33 [1048459, 859836]
processed_samples 11400 unjoint_samples 11400 joint_samples 35 [66734, 1047963]
processed_samples 11400 unjoint_samples 11400 joint_samples 34 [986382, 507422]
processed_samples 11400 unjoint_samples 11400 joint_samples 33 [432658, 1046853]
processed_samples 11400 unjoint_samples 11400 joint_samples 33 [1034722, 728314]
processed_samples 11400 unjoint_samples 11400 joint_samples 34 [1046708, 934188]
processed_samples 11400 unjoint_samples 11400 joint_samples 34 [1044378, 1017657]
processed_samples 11400 unjoint_samples 11400 joint_samples 33 [1046379, 662562]
[mov,mp4,m4a,3gp,3g2,mj2 @ 0x55f3a6641c40] stream 1, offset 0xbcbd: partial file
[mov,mp4,m4a,3gp,3g2,mj2 @ 0x55f3a6641c40] stream 1, offset 0xbe30: partial file
[mov,mp4,m4a,3gp,3g2,mj2 @ 0x55e9d6327080] stream 1, offset 0xbcbd: partial file
[mov,mp4,m4a,3gp,3g2,mj2 @ 0x55e9d6327080] stream 1, offset 0xbe30: partial file
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 11500 unjoint_samples 11500 joint_samples 34 [83708, 1047843]
processed_samples 11500 unjoint_samples 11500 joint_samples 35 [207809, 1039120]
processed_samples 11500 unjoint_samples 11500 joint_samples 35 [324561, 1047963]
processed_samples 11500 unjoint_samples 11500 joint_samples 34 [986382, 802068]
processed_samples 11500 unjoint_samples 11500 joint_samples 35 [1046297, 364528]
processed_samples 11500 unjoint_samples 11500 joint_samples 33 [1046379, 910806]
processed_samples 11500 unjoint_samples 11500 joint_samples 33 [1034722, 998034]
processed_samples 11501 unjoint_samples 11500 joint_samples 33 [893547, 1046853]
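The "[h264 @ 0x...] mmco: unref short failure" and "stream 1, offset ...: partial file" messages above come from the underlying ffmpeg/libav demuxer and h264 decoder while reading imperfect video clips; they are emitted outside the training code, and the dataloader keeps advancing past them, as the processed_samples counters show. If the decode path here happens to go through PyAV (an assumption; the log does not show which video backend is in use), a minimal log-hygiene sketch would be to raise the libav log threshold so only errors surface:

import av.logging

# Hypothetical tweak, assuming PyAV is the video decoding binding:
# show libav errors only, dropping the recurring non-fatal
# "mmco: unref short failure" warnings from the per-node logs.
av.logging.set_level(av.logging.ERROR)

With the ffmpeg command-line tool the equivalent knob is "-loglevel error"; either way this only quiets the logging and does not change what gets decoded.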
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 11600 unjoint_samples 11600 joint_samples 35 [460446, 1039120]
processed_samples 11600 unjoint_samples 11600 joint_samples 34 [1046379, 179904]
processed_samples 11600 unjoint_samples 11600 joint_samples 34 [1036826, 191065]
processed_samples 11600 unjoint_samples 11600 joint_samples 34 [383429, 1047843]
processed_samples 11601 unjoint_samples 11600 joint_samples 34 [1037313, 207525]
processed_samples 11600 unjoint_samples 11600 joint_samples 35 [705095, 1047963]
processed_samples 11600 unjoint_samples 11600 joint_samples 35 [1041248, 27375]
processed_samples 11600 unjoint_samples 11600 joint_samples 35 [1046297, 618480]
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 11700 unjoint_samples 11700 joint_samples 34 [1046379, 479019]
processed_samples 11700 unjoint_samples 11700 joint_samples 34 [647994, 1047843]
processed_samples 11700 unjoint_samples 11700 joint_samples 35 [1041248, 290890]
processed_samples 11700 unjoint_samples 11700 joint_samples 34 [1036826, 487165]
processed_samples 11700 unjoint_samples 11700 joint_samples 35 [771758, 1039120]
processed_samples 11700 unjoint_samples 11700 joint_samples 35 [964502, 1047963]
processed_samples 11700 unjoint_samples 11700 joint_samples 35 [1046297, 865736]
processed_samples 11701 unjoint_samples 11700 joint_samples 34 [1037313, 515244]
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 11800 unjoint_samples 11800 joint_samples 34 [1036826, 710916]
processed_samples 11800 unjoint_samples 11800 joint_samples 35 [1047420, 8349]
processed_samples 11800 unjoint_samples 11800 joint_samples 36 [1047192, 198117]
processed_samples 11800 unjoint_samples 11800 joint_samples 36 [116514, 1046971]
processed_samples 11800 unjoint_samples 11800 joint_samples 34 [1046379, 773943]
processed_samples 11800 unjoint_samples 11800 joint_samples 35 [1041248, 567204]
processed_samples 11800 unjoint_samples 11800 joint_samples 35 [1029332, 1039120]
processed_samples 11801 unjoint_samples 11800 joint_samples 34 [1037313, 855251]
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 11900 unjoint_samples 11900 joint_samples 35 [231947, 950591]
processed_samples 11900 unjoint_samples 11900 joint_samples 36 [1047192, 527059]
processed_samples 11900 unjoint_samples 11900 joint_samples 36 [1047045, 398824]
processed_samples 11900 unjoint_samples 11900 joint_samples 35 [1047420, 301386]
processed_samples 11900 unjoint_samples 11900 joint_samples 36 [377821, 1046971]
processed_samples 11901 unjoint_samples 11900 joint_samples 35 [1046776, 57211]
processed_samples 11900 unjoint_samples 11900 joint_samples 35 [1041248, 889781]
processed_samples 11900 unjoint_samples 11900 joint_samples 34 [1036826, 989568]
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 12000 unjoint_samples 12000 joint_samples 35 [1041436, 347766]
processed_samples 12000 unjoint_samples 12000 joint_samples 36 [1045319, 160075]
processed_samples 12000 unjoint_samples 12000 joint_samples 35 [570777, 950591]
processed_samples 12000 unjoint_samples 12000 joint_samples 35 [1047420, 688324]
processed_samples 12000 unjoint_samples 12000 joint_samples 36 [772308, 1046971]
processed_samples 12000 unjoint_samples 12000 joint_samples 36 [1047045, 679496]
processed_samples 12000 unjoint_samples 12000 joint_samples 36 [1047192, 811637]
processed_samples 12001 unjoint_samples 12000 joint_samples 35 [1046776, 550538]
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 12100 unjoint_samples 12100 joint_samples 35 [1041436, 644804]
processed_samples 12100 unjoint_samples 12100 joint_samples 37 [18715, 1046977]
processed_samples 12100 unjoint_samples 12100 joint_samples 37 [39170, 1046971]
processed_samples 12100 unjoint_samples 12100 joint_samples 36 [1045319, 571706]
processed_samples 12100 unjoint_samples 12100 joint_samples 37 [69311, 1046918]
processed_samples 12100 unjoint_samples 12100 joint_samples 35 [1047420, 946950]
processed_samples 12100 unjoint_samples 12100 joint_samples 35 [984521, 961762]
processed_samples 12101 unjoint_samples 12100 joint_samples 35 [1046776, 947227]
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 12200 unjoint_samples 12200 joint_samples 36 [245345, 1014124]
processed_samples 12200 unjoint_samples 12200 joint_samples 37 [308361, 1046977]
processed_samples 12200 unjoint_samples 12200 joint_samples 37 [377912, 1046971]
processed_samples 12200 unjoint_samples 12200 joint_samples 37 [300614, 1046918]
processed_samples 12200 unjoint_samples 12200 joint_samples 36 [292855, 1037836]
processed_samples 12201 unjoint_samples 12200 joint_samples 36 [233297, 1048062]
processed_samples 12200 unjoint_samples 12200 joint_samples 36 [1045319, 865826]
processed_samples 12200 unjoint_samples 12200 joint_samples 35 [1041436, 1001319]
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 12300 unjoint_samples 12300 joint_samples 36 [575523, 1014124]
processed_samples 12300 unjoint_samples 12300 joint_samples 36 [646309, 1037836]
processed_samples 12300 unjoint_samples 12300 joint_samples 36 [1043382, 224717]
processed_samples 12300 unjoint_samples 12300 joint_samples 37 [723507, 1046971]
processed_samples 12300 unjoint_samples 12300 joint_samples 37 [1045319, 68214]
processed_samples 12301 unjoint_samples 12300 joint_samples 36 [647769, 1048062]
processed_samples 12300 unjoint_samples 12300 joint_samples 37 [829539, 1046977]
processed_samples 12300 unjoint_samples 12300 joint_samples 37 [555238, 1046918]
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
processed_samples 12400 unjoint_samples 12400 joint_samples 36 [1043382, 459152]
processed_samples 12400 unjoint_samples 12400 joint_samples 38 [1046948, 33606]
processed_samples 12400 unjoint_samples 12400 joint_samples 37 [1045319, 389507]
processed_samples 12400 unjoint_samples 12400 joint_samples 36 [845713, 1014124]
processed_samples 12400 unjoint_samples 12400 joint_samples 38 [57145, 1046977]
processed_samples 12400 unjoint_samples 12400 joint_samples 37 [859054, 1046918]
processed_samples 12400 unjoint_samples 12400 joint_samples 36 [910078, 1037836]
processed_samples 12401 unjoint_samples 12400 joint_samples 36 [1033962, 1048062]
[h264 @ 0x...] mmco: unref short failure (repeated many times across decoder contexts)
failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55e9d2389280] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d4cc9740] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d4cc9740] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure processed_samples 12500 unjoint_samples 12500 joint_samples 38 [1003555, 261626] [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure processed_samples 12500 unjoint_samples 12500 joint_samples 37 [114533, 1047381] processed_samples 12500 unjoint_samples 12500 joint_samples 38 [1003555, 261626] [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure processed_samples 12500 unjoint_samples 12500 joint_samples 37 [114533, 1047381] processed_samples 12500 unjoint_samples 12500 joint_samples 38 [436656, 1046977] processed_samples 12500 unjoint_samples 12500 joint_samples 38 [1046948, 358172] processed_samples 12500 unjoint_samples 12500 joint_samples 38 [436656, 1046977] processed_samples 12500 unjoint_samples 12500 joint_samples 38 [1046948, 358172] processed_samples 12500 unjoint_samples 12500 joint_samples 37 [1045319, 868041] processed_samples 12500 unjoint_samples 12500 joint_samples 37 [1045319, 868041] processed_samples 12500 
unjoint_samples 12500 joint_samples 36 [1043382, 767130] processed_samples 12500 unjoint_samples 12500 joint_samples 36 [1043382, 767130] processed_samples 12500 unjoint_samples 12500 joint_samples 36 [1042099, 1043737] processed_samples 12500 unjoint_samples 12500 joint_samples 36 [1042099, 1043737] processed_samples 12501 unjoint_samples 12500 joint_samples 37 [1033962, 460842] processed_samples 12501 unjoint_samples 12500 joint_samples 37 [1033962, 460842] [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d5342c40] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55e9d446c1c0] mmco: unref short failure [h264 @ 0x55e9d446c1c0] mmco: unref 
short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure processed_samples 12600 unjoint_samples 12600 joint_samples 37 [1046420, 277468] processed_samples 12600 unjoint_samples 12600 joint_samples 37 [1046420, 277468] processed_samples 12600 unjoint_samples 12600 joint_samples 38 [255269, 1046182] processed_samples 12600 unjoint_samples 12600 joint_samples 38 [255269, 1046182] processed_samples 12600 unjoint_samples 12600 joint_samples 37 [551838, 1047381] processed_samples 12600 unjoint_samples 12600 joint_samples 37 [551838, 1047381] processed_samples 12600 unjoint_samples 12600 joint_samples 38 [1003555, 613981] processed_samples 12600 unjoint_samples 12600 joint_samples 38 [1003555, 613981] processed_samples 12600 unjoint_samples 12600 joint_samples 38 [1046948, 674270] processed_samples 12600 unjoint_samples 12600 joint_samples 38 [1046948, 674270] processed_samples 12600 unjoint_samples 12600 joint_samples 38 [736016, 1046977] processed_samples 12600 unjoint_samples 12600 joint_samples 38 [736016, 1046977] processed_samples 12601 unjoint_samples 12600 joint_samples 37 [1033962, 728993] processed_samples 12601 unjoint_samples 12600 joint_samples 37 [1033962, 728993] processed_samples 12600 unjoint_samples 12600 joint_samples 37 [18651, 1041169] processed_samples 12600 
unjoint_samples 12600 joint_samples 37 [18651, 1041169] [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short 
failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure processed_samples 12700 unjoint_samples 12700 joint_samples 37 [319521, 1041169] processed_samples 12700 unjoint_samples 12700 joint_samples 37 [319521, 1041169] [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure processed_samples 12700 unjoint_samples 12700 joint_samples 37 [1046420, 631649] processed_samples 12700 unjoint_samples 12700 joint_samples 37 [1046420, 631649] processed_samples 12700 unjoint_samples 12700 joint_samples 39 [97162, 1046977] processed_samples 12700 unjoint_samples 12700 joint_samples 39 [97162, 1046977] processed_samples 12700 unjoint_samples 12700 joint_samples 38 [1046948, 1016583] processed_samples 12700 unjoint_samples 12700 joint_samples 38 [1046948, 1016583] processed_samples 12700 unjoint_samples 12700 joint_samples 38 [551282, 1046182] processed_samples 12700 unjoint_samples 12700 joint_samples 38 [551282, 1046182] processed_samples 12700 unjoint_samples 12700 joint_samples 38 [1003555, 950246] processed_samples 12700 unjoint_samples 12700 joint_samples 38 [1003555, 950246] processed_samples 12701 unjoint_samples 12700 joint_samples 38 [76467, 1040474] processed_samples 12701 unjoint_samples 12700 joint_samples 38 [76467, 1040474] processed_samples 12700 unjoint_samples 12700 joint_samples 37 [882971, 1047381] processed_samples 12700 unjoint_samples 12700 joint_samples 37 [882971, 1047381] [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure 
[h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55e9d34cfd80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa454480] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d0b25200] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d2a44f40] mmco: unref short failure [h264 @ 0x55e9d2a44f40] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure processed_samples 12800 unjoint_samples 12800 joint_samples 38 [958340, 374187] processed_samples 12800 unjoint_samples 12800 joint_samples 39 [205488, 1048360] processed_samples 12800 unjoint_samples 12800 joint_samples 38 [958340, 374187] processed_samples 12800 unjoint_samples 12800 joint_samples 39 [405884, 1046977] processed_samples 12800 unjoint_samples 12800 joint_samples 39 [205488, 1048360] 
processed_samples 12800 unjoint_samples 12800 joint_samples 39 [405884, 1046977] [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure processed_samples 12800 unjoint_samples 12800 joint_samples 39 [238663, 1046763] processed_samples 12800 unjoint_samples 12800 joint_samples 37 [652702, 1041169] [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure processed_samples 12800 unjoint_samples 12800 joint_samples 38 [919507, 1046182] processed_samples 12800 unjoint_samples 12800 joint_samples 39 [238663, 1046763] processed_samples 12800 unjoint_samples 12800 joint_samples 37 [652702, 1041169] processed_samples 12800 unjoint_samples 12800 joint_samples 38 [919507, 1046182] processed_samples 12801 unjoint_samples 12800 joint_samples 38 [469178, 1040474] processed_samples 12800 unjoint_samples 12800 joint_samples 37 [1046420, 1005040] processed_samples 12801 unjoint_samples 12800 joint_samples 38 [469178, 1040474] processed_samples 12800 unjoint_samples 12800 joint_samples 37 [1046420, 1005040] [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 
0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure processed_samples 12900 unjoint_samples 12900 joint_samples 38 [233495, 1044647] processed_samples 12900 unjoint_samples 12900 joint_samples 38 [233495, 1044647] processed_samples 12900 unjoint_samples 12900 joint_samples 39 [1046902, 165959] processed_samples 12900 unjoint_samples 12900 joint_samples 39 [1046902, 165959] processed_samples 12900 unjoint_samples 12900 joint_samples 38 [958340, 694207] processed_samples 12900 unjoint_samples 12900 joint_samples 38 [958340, 694207] [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure processed_samples 12900 unjoint_samples 12900 joint_samples 39 [644267, 1046977] processed_samples 12900 unjoint_samples 12900 joint_samples 39 [644267, 1046977] processed_samples 12900 unjoint_samples 12900 joint_samples 39 [494679, 1048360] processed_samples 12900 unjoint_samples 12900 joint_samples 39 [494679, 1048360] processed_samples 12900 unjoint_samples 12900 joint_samples 39 [580619, 1046763] processed_samples 12900 unjoint_samples 12900 joint_samples 39 [580619, 1046763] [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure processed_samples 12900 unjoint_samples 12900 joint_samples 37 [1007328, 1041169] processed_samples 12900 unjoint_samples 12900 joint_samples 37 [1007328, 1041169] processed_samples 12901 unjoint_samples 12900 joint_samples 38 [807429, 
1040474] processed_samples 12901 unjoint_samples 12900 joint_samples 38 [807429, 1040474] [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55f3aa454480] mmco: unref short failure [h264 @ 0x55f3aa454480] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d5342c40] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d5342c40] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d4663280] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3aaf1eb80] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 
0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure processed_samples 13000 unjoint_samples 13000 joint_samples 38 [253514, 1047847] processed_samples 13000 unjoint_samples 13000 joint_samples 38 [253514, 1047847] processed_samples 13000 unjoint_samples 13000 joint_samples 38 [500974, 1044647] processed_samples 13000 unjoint_samples 13000 joint_samples 38 [500974, 1044647] processed_samples 13000 unjoint_samples 13000 joint_samples 38 [971636, 971016] processed_samples 13000 unjoint_samples 13000 joint_samples 38 [971636, 971016] processed_samples 13000 unjoint_samples 13000 joint_samples 39 [1046902, 438181] processed_samples 13000 unjoint_samples 13000 joint_samples 39 [1046902, 438181] processed_samples 13000 unjoint_samples 13000 joint_samples 39 [955192, 1048360] processed_samples 13000 unjoint_samples 13000 joint_samples 39 [955192, 1048360] processed_samples 13001 unjoint_samples 13000 joint_samples 39 [129285, 1040653] processed_samples 13001 unjoint_samples 13000 joint_samples 39 [129285, 1040653] [h264 @ 0x55e9d09bb780] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure processed_samples 13000 unjoint_samples 13000 joint_samples 39 [1000964, 1046977] processed_samples 13000 unjoint_samples 13000 joint_samples 39 [1000964, 1046977] processed_samples 13000 unjoint_samples 13000 joint_samples 39 [904412, 1046763] processed_samples 13000 unjoint_samples 13000 joint_samples 39 [904412, 1046763] [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] [h264 @ 0x55f3ad6cb540] mmco: unref short failure mmco: unref short failure [h264 @ 0x55f3ad6cb540] [h264 @ 0x55e9d352ed40] mmco: unref short failure mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 
0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure processed_samples 13100 unjoint_samples 13100 joint_samples 39 [1046902, 692769] processed_samples 13100 unjoint_samples 13100 joint_samples 39 [1046902, 692769] processed_samples 13100 unjoint_samples 13100 joint_samples 38 [620716, 1047847] processed_samples 13100 unjoint_samples 13100 joint_samples 38 [620716, 1047847] processed_samples 13100 unjoint_samples 13100 joint_samples 40 [1033793, 241561] processed_samples 13100 unjoint_samples 13100 joint_samples 40 [1033793, 241561] processed_samples 13100 unjoint_samples 13100 joint_samples 39 [321765, 1024301] processed_samples 13100 unjoint_samples 13100 joint_samples 39 [321765, 1024301] processed_samples 13100 unjoint_samples 13100 joint_samples 40 [309636, 1046977] processed_samples 13100 unjoint_samples 13100 joint_samples 40 [309636, 1046977] processed_samples 13100 unjoint_samples 13100 joint_samples 40 [1040110, 226636] processed_samples 13100 unjoint_samples 13100 joint_samples 40 [1040110, 226636] processed_samples 13100 unjoint_samples 13100 joint_samples 38 [928895, 1044647] processed_samples 13100 unjoint_samples 13100 joint_samples 38 [928895, 1044647] processed_samples 13101 unjoint_samples 13100 joint_samples 39 [406790, 1040653] processed_samples 13101 unjoint_samples 13100 joint_samples 39 [406790, 1040653] [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 
0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a6b80b40] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d2389280] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d0b25200] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure processed_samples 13200 unjoint_samples 13200 joint_samples 39 [1034654, 333875] processed_samples 13200 unjoint_samples 13200 joint_samples 39 [1034654, 333875] processed_samples 13200 unjoint_samples 13200 joint_samples 40 [1033793, 538106] processed_samples 13200 unjoint_samples 13200 joint_samples 40 [1033793, 538106] processed_samples 13200 unjoint_samples 13200 joint_samples 40 [546149, 1046977] processed_samples 13200 unjoint_samples 13200 joint_samples 40 [546149, 1046977] processed_samples 13200 unjoint_samples 13200 joint_samples 39 [666629, 1024301] processed_samples 13200 unjoint_samples 13200 joint_samples 39 [666629, 1024301] processed_samples 13200 unjoint_samples 13200 joint_samples 38 [986957, 1047847] processed_samples 13200 unjoint_samples 13200 joint_samples 38 [986957, 1047847] processed_samples 13201 unjoint_samples 13200 joint_samples 39 [833207, 1040653] processed_samples 13201 unjoint_samples 13200 joint_samples 39 [833207, 1040653] processed_samples 13200 unjoint_samples 13200 joint_samples 40 [1040110, 571355] processed_samples 13200 unjoint_samples 13200 joint_samples 40 [1040110, 571355] processed_samples 13200 unjoint_samples 13200 joint_samples 39 [1046902, 921029] processed_samples 13200 unjoint_samples 13200 joint_samples 39 [1046902, 921029] [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 
0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d0b25200] mmco: unref short failure [h264 @ 0x55e9d0b25200] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [mov,mp4,m4a,3gp,3g2,mj2 @ 0x55f3a67c5f80] stream 1, offset 0x1400056: partial file [mov,mp4,m4a,3gp,3g2,mj2 @ 0x55e9d6327080] stream 1, offset 0x1400056: partial file [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure processed_samples 13300 unjoint_samples 13300 joint_samples 39 [1037885, 294457] processed_samples 13300 unjoint_samples 13300 joint_samples 39 [1037885, 294457] processed_samples 13300 
unjoint_samples 13300 joint_samples 39 [1034654, 661704] processed_samples 13300 unjoint_samples 13300 joint_samples 39 [1034654, 661704] processed_samples 13300 unjoint_samples 13300 joint_samples 40 [188270, 1029454] [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure processed_samples 13300 unjoint_samples 13300 joint_samples 40 [188270, 1029454] [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure processed_samples 13300 unjoint_samples 13300 joint_samples 40 [1040110, 833120] processed_samples 13300 unjoint_samples 13300 joint_samples 40 [1040110, 833120] [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure processed_samples 13300 unjoint_samples 13300 joint_samples 40 [1033793, 946244] processed_samples 13300 unjoint_samples 13300 joint_samples 40 [1033793, 946244] processed_samples 13300 unjoint_samples 13300 joint_samples 39 [966213, 1024301] processed_samples 13300 unjoint_samples 13300 joint_samples 40 [785724, 1046977] processed_samples 13300 unjoint_samples 13300 joint_samples 40 [785724, 1046977] processed_samples 13300 unjoint_samples 13300 joint_samples 39 [966213, 1024301] processed_samples 13302 unjoint_samples 13300 joint_samples 40 [123938, 1046739] processed_samples 13302 unjoint_samples 13300 joint_samples 40 [123938, 1046739] [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d12bb6c0] mmco: unref short failure [h264 @ 0x55e9d12bb6c0] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a917d2c0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short 
processed_samples 13400 unjoint_samples 13400 joint_samples 40 [1043095, 260141]
processed_samples 13400 unjoint_samples 13400 joint_samples 41 [1046419, 47899]
processed_samples 13400 unjoint_samples 13400 joint_samples 41 [1003822, 149790]
processed_samples 13400 unjoint_samples 13400 joint_samples 41 [225344, 1022947]
processed_samples 13400 unjoint_samples 13400 joint_samples 40 [461209, 1029454]
processed_samples 13400 unjoint_samples 13400 joint_samples 39 [1034654, 961497]
processed_samples 13400 unjoint_samples 13400 joint_samples 39 [1037885, 679623]
processed_samples 13402 unjoint_samples 13400 joint_samples 40 [416893, 1046739]
[h264 @ 0x…] mmco: unref short failure (warning repeated many times across multiple decoder instances)
processed_samples 13500 unjoint_samples 13500 joint_samples 40 [1045842, 25528]
processed_samples 13500 unjoint_samples 13500 joint_samples 40 [1043997, 217231]
processed_samples 13500 unjoint_samples 13500 joint_samples 40 [1043095, 629867]
processed_samples 13500 unjoint_samples 13500 joint_samples 41 [1003822, 595687]
processed_samples 13500 unjoint_samples 13500 joint_samples 41 [1046419, 331079]
processed_samples 13500 unjoint_samples 13500 joint_samples 41 [497022, 1022947]
processed_samples 13502 unjoint_samples 13500 joint_samples 40 [789501, 1046739]
processed_samples 13500 unjoint_samples 13500 joint_samples 40 [788921, 1029454]
[h264 @ 0x…] mmco: unref short failure (warning repeated many times across multiple decoder instances)
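The bracketed "[h264 @ 0x…] mmco: unref short failure" lines are emitted by FFmpeg's libavcodec H.264 decoder: a memory-management control operation in the bitstream asks the decoder to unreference a short-term reference picture it does not hold, which typically happens when clips are decoded from non-keyframe boundaries or contain missing or damaged reference frames. Decoding continues, so for this run they are noise rather than errors; if the video loader goes through the ffmpeg CLI, its -loglevel option can demote such messages. The script below is a hypothetical post-hoc cleanup sketch (file names and regex are assumptions) that strips intact warning fragments from a saved log.

import re
import sys

# Inline fragments like "[h264 @ 0x55f3a85816c0] mmco: unref short failure".
H264_WARNING = re.compile(r"\[h264 @ 0x[0-9a-f]+\] mmco: unref short failure[ \t]*")

def strip_h264_warnings(src="log_node19.txt", dst="log_node19.clean.txt"):
    """Write a copy of the log with the repeated h264 decoder warnings removed."""
    with open(src, errors="replace") as fin, open(dst, "w") as fout:
        for line in fin:
            cleaned = H264_WARNING.sub("", line)
            if cleaned.strip():            # skip lines that held only warnings
                fout.write(cleaned)

if __name__ == "__main__":
    strip_h264_warnings(*sys.argv[1:3])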
processed_samples 13600 unjoint_samples 13600 joint_samples 40 [1045842, 447651]
processed_samples 13600 unjoint_samples 13600 joint_samples 40 [1043997, 511902]
processed_samples 13600 unjoint_samples 13600 joint_samples 40 [1043095, 938875]
processed_samples 13600 unjoint_samples 13600 joint_samples 41 [1046419, 612633]
processed_samples 13600 unjoint_samples 13600 joint_samples 41 [90664, 1045335]
processed_samples 13600 unjoint_samples 13600 joint_samples 41 [1003822, 891302]
processed_samples 13600 unjoint_samples 13600 joint_samples 41 [742331, 1022947]
processed_samples 13602 unjoint_samples 13600 joint_samples 41 [2127, 1046739]
[h264 @ 0x…] mmco: unref short failure (warning repeated many times across multiple decoder instances)
processed_samples 13700 unjoint_samples 13700 joint_samples 41 [485285, 948073]
processed_samples 13700 unjoint_samples 13700 joint_samples 42 [135221, 1047588]
processed_samples 13700 unjoint_samples 13700 joint_samples 40 [1045842, 836723]
processed_samples 13700 unjoint_samples 13700 joint_samples 40 [1043997, 781718]
processed_samples 13700 unjoint_samples 13700 joint_samples 41 [327229, 1045335]
processed_samples 13700 unjoint_samples 13700 joint_samples 41 [1046058, 1039294]
processed_samples 13700 unjoint_samples 13700 joint_samples 41 [1046419, 874280]
processed_samples 13702 unjoint_samples 13700 joint_samples 41 [275334, 1046739]
[h264 @ 0x…] mmco: unref short failure (warning repeated many times across multiple decoder instances)
processed_samples 13800 unjoint_samples 13800 joint_samples 42 [1046921, 109850]
processed_samples 13800 unjoint_samples 13800 joint_samples 41 [14303, 1047358]
processed_samples 13800 unjoint_samples 13800 joint_samples 42 [1046058, 421466]
processed_samples 13800 unjoint_samples 13800 joint_samples 41 [883726, 948073]
processed_samples 13800 unjoint_samples 13800 joint_samples 42 [485994, 1047588]
processed_samples 13800 unjoint_samples 13800 joint_samples 41 [188716, 1019457]
processed_samples 13800 unjoint_samples 13800 joint_samples 41 [665419, 1045335]
processed_samples 13802 unjoint_samples 13800 joint_samples 41 [590495, 1046739]
[h264 @ 0x…] mmco: unref short failure (warning repeated many times across multiple decoder instances)
processed_samples 13900 unjoint_samples 13900 joint_samples 41 [581064, 1019457]
processed_samples 13900 unjoint_samples 13900 joint_samples 41 [265890, 1047358]
processed_samples 13900 unjoint_samples 13900 joint_samples 42 [1047226, 4751]
processed_samples 13900 unjoint_samples 13900 joint_samples 42 [1046921, 437762]
processed_samples 13900 unjoint_samples 13900 joint_samples 42 [1046058, 797637]
processed_samples 13900 unjoint_samples 13900 joint_samples 41 [977099, 1045335]
processed_samples 13900 unjoint_samples 13900 joint_samples 42 [862864, 1047588]
processed_samples 13902 unjoint_samples 13900 joint_samples 41 [979909, 1046739]
[h264 @ 0x…] mmco: unref short failure (warning repeated many times across multiple decoder instances)
processed_samples 14000 unjoint_samples 14000 joint_samples 41 [545354, 1047358]
processed_samples 14000 unjoint_samples 14000 joint_samples 42 [1022491, 282792]
processed_samples 14000 unjoint_samples 14000 joint_samples 43 [117266, 1029322]
processed_samples 14000 unjoint_samples 14000 joint_samples 43 [1044785, 217160]
processed_samples 14000 unjoint_samples 14000 joint_samples 42 [1047226, 367346]
processed_samples 14000 unjoint_samples 14000 joint_samples 41 [858011, 1019457]
processed_samples 14000 unjoint_samples 14000 joint_samples 42 [1046921, 682193]
processed_samples 14002 unjoint_samples 14000 joint_samples 42 [216779, 1046739]
[h264 @ 0x…] mmco: unref short failure (warning repeated many times across multiple decoder instances)
0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d568b380] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure processed_samples 14100 unjoint_samples 14100 joint_samples 43 [413456, 1029322] processed_samples 14100 unjoint_samples 14100 joint_samples 43 [1044785, 489351] processed_samples 14100 unjoint_samples 14100 joint_samples 43 [1044785, 489351] processed_samples 14100 unjoint_samples 14100 joint_samples 43 [413456, 1029322] processed_samples 14100 unjoint_samples 14100 joint_samples 42 [1013536, 242326] processed_samples 14100 unjoint_samples 14100 joint_samples 42 [1013536, 242326] processed_samples 14100 unjoint_samples 14100 joint_samples 42 [1022491, 613183] processed_samples 14100 unjoint_samples 14100 joint_samples 42 [1022491, 613183] processed_samples 14100 unjoint_samples 14100 joint_samples 42 [1047226, 682567] processed_samples 14100 unjoint_samples 14100 joint_samples 42 [1047226, 682567] processed_samples 14100 unjoint_samples 14100 joint_samples 41 [804177, 1047358] processed_samples 14100 unjoint_samples 14100 joint_samples 41 [804177, 1047358] [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure processed_samples 14100 unjoint_samples 14100 joint_samples 42 [1046921, 934850] processed_samples 14100 unjoint_samples 14100 joint_samples 42 [1046921, 934850] processed_samples 14102 unjoint_samples 14100 joint_samples 42 [540745, 1046739] processed_samples 14102 unjoint_samples 14100 joint_samples 42 [540745, 1046739] [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a6f63e00] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 
0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d7a57f00] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d568b380] mmco: unref short failure [h264 @ 
processed_samples 14200 unjoint_samples 14200 joint_samples 43 [1044785, 707018]
processed_samples 14200 unjoint_samples 14200 joint_samples 42 [1013536, 494663]
processed_samples 14200 unjoint_samples 14200 joint_samples 43 [805715, 1029322]
processed_samples 14200 unjoint_samples 14200 joint_samples 42 [1047226, 1036081]
processed_samples 14200 unjoint_samples 14200 joint_samples 43 [202840, 1044517]
processed_samples 14200 unjoint_samples 14200 joint_samples 41 [1023756, 1047358]
processed_samples 14200 unjoint_samples 14200 joint_samples 42 [1032642, 1034802]
processed_samples 14202 unjoint_samples 14200 joint_samples 42 [833977, 1046739]
[h264 @ 0x…] mmco: unref short failure (warning repeated many times across multiple decoder instances)
processed_samples 14300 unjoint_samples 14300 joint_samples 44 [1046649, 22820]
processed_samples 14300 unjoint_samples 14300 joint_samples 42 [222326, 1047358]
processed_samples 14300 unjoint_samples 14300 joint_samples 43 [276028, 1047622]
processed_samples 14300 unjoint_samples 14300 joint_samples 42 [1013536, 828390]
processed_samples 14300 unjoint_samples 14300 joint_samples 43 [1044945, 390550]
processed_samples 14300 unjoint_samples 14300 joint_samples 43 [542092, 1044517]
processed_samples 14300 unjoint_samples 14300 joint_samples 43 [1044785, 1002748]
processed_samples 14302 unjoint_samples 14300 joint_samples 43 [25054, 1046739]
[h264 @ 0x…] mmco: unref short failure (warning repeated many times across multiple decoder instances)
processed_samples 14400 unjoint_samples 14400 joint_samples 43 [1013536, 243399]
processed_samples 14400 unjoint_samples 14400 joint_samples 42 [557927, 1047358]
processed_samples 14400 unjoint_samples 14400 joint_samples 43 [492882, 1047622]
processed_samples 14400 unjoint_samples 14400 joint_samples 44 [311459, 1047533]
processed_samples 14400 unjoint_samples 14400 joint_samples 44 [1046649, 349632]
processed_samples 14400 unjoint_samples 14400 joint_samples 43 [1044945, 701635]
processed_samples 14402 unjoint_samples 14400 joint_samples 43 [432279, 1046739]
processed_samples 14400 unjoint_samples 14400 joint_samples 43 [840560, 1044517]
[h264 @ 0x…] mmco: unref short failure (warning repeated many times across multiple decoder instances)
0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3aa792140] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure processed_samples 14500 unjoint_samples 14500 joint_samples 44 [547122, 1047533] processed_samples 14500 
unjoint_samples 14500 joint_samples 43 [1013536, 456744] processed_samples 14500 unjoint_samples 14500 joint_samples 44 [1047727, 66098] processed_samples 14500 unjoint_samples 14500 joint_samples 44 [1047727, 66098] processed_samples 14500 unjoint_samples 14500 joint_samples 44 [547122, 1047533] processed_samples 14500 unjoint_samples 14500 joint_samples 43 [1013536, 456744] processed_samples 14500 unjoint_samples 14500 joint_samples 43 [797705, 1047622] processed_samples 14500 unjoint_samples 14500 joint_samples 42 [888648, 1047358] processed_samples 14500 unjoint_samples 14500 joint_samples 43 [797705, 1047622] processed_samples 14500 unjoint_samples 14500 joint_samples 42 [888648, 1047358] processed_samples 14500 unjoint_samples 14500 joint_samples 44 [1046649, 578292] processed_samples 14500 unjoint_samples 14500 joint_samples 44 [1046649, 578292] processed_samples 14502 unjoint_samples 14500 joint_samples 43 [692740, 1046739] processed_samples 14502 unjoint_samples 14500 joint_samples 43 [692740, 1046739] processed_samples 14500 unjoint_samples 14500 joint_samples 43 [1044945, 1031585] processed_samples 14500 unjoint_samples 14500 joint_samples 43 [1044945, 1031585] [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d3bd0700] mmco: unref short failure [h264 @ 0x55e9d3bd0700] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3bc39c0] mmco: unref short failure [h264 @ 0x55e9d3bc39c0] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 
0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d5342c40] mmco: unref short failure [h264 @ 0x55f3a917d2c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure processed_samples 14600 unjoint_samples 14600 joint_samples 43 [229330, 1047358] processed_samples 14600 unjoint_samples 14600 joint_samples 43 [229330, 1047358] processed_samples 14600 unjoint_samples 14600 joint_samples 44 [856377, 1047533] processed_samples 14600 unjoint_samples 14600 joint_samples 44 [856377, 1047533] processed_samples 14600 unjoint_samples 14600 joint_samples 44 [1047727, 386318] processed_samples 14600 unjoint_samples 14600 joint_samples 44 [1036073, 70686] processed_samples 14600 unjoint_samples 14600 joint_samples 44 [1036073, 70686] processed_samples 14600 unjoint_samples 14600 joint_samples 44 [1047727, 386318] processed_samples 14600 unjoint_samples 14600 joint_samples 44 [335365, 1047813] processed_samples 14600 unjoint_samples 14600 joint_samples 44 [335365, 1047813] processed_samples 14600 unjoint_samples 14600 joint_samples 44 [1046649, 906405] processed_samples 14600 unjoint_samples 14600 joint_samples 44 [1046649, 906405] processed_samples 14600 unjoint_samples 14600 joint_samples 43 [1013536, 795613] processed_samples 14600 unjoint_samples 14600 joint_samples 43 [1013536, 795613] processed_samples 14602 unjoint_samples 14600 joint_samples 43 [927425, 1046739] processed_samples 14602 unjoint_samples 14600 joint_samples 43 [927425, 1046739] [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3a917d2c0] mmco: unref short failure [h264 @ 0x55f3a917d2c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a917d2c0] mmco: unref short failure [h264 @ 0x55f3a917d2c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a917d2c0] mmco: unref short failure [h264 @ 0x55f3a917d2c0] mmco: unref short failure [h264 @ 0x55f3a917d2c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a917d2c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 
0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d1152040] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d378a740] mmco: unref short failure [h264 @ 0x55e9d378a740] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure processed_samples 14700 unjoint_samples 14700 joint_samples 44 [1046225, 140804] processed_samples 14700 unjoint_samples 14700 joint_samples 44 [1046225, 140804] processed_samples 14700 unjoint_samples 14700 joint_samples 43 [611597, 1047358] processed_samples 14700 unjoint_samples 14700 joint_samples 43 [611597, 1047358] processed_samples 14700 unjoint_samples 14700 joint_samples 45 [1046649, 168423] processed_samples 14700 unjoint_samples 14700 joint_samples 45 [1046649, 168423] processed_samples 14700 unjoint_samples 14700 joint_samples 45 [1046814, 132872] processed_samples 14700 unjoint_samples 14700 joint_samples 45 [1046814, 132872] processed_samples 14700 unjoint_samples 14700 joint_samples 44 [1036073, 394274] processed_samples 14700 unjoint_samples 14700 joint_samples 44 [1036073, 394274] processed_samples 14700 unjoint_samples 14700 joint_samples 44 [681790, 1047813] processed_samples 14700 unjoint_samples 14700 joint_samples 44 [681790, 1047813] processed_samples 14702 unjoint_samples 14700 joint_samples 44 [150888, 1046739] processed_samples 14702 unjoint_samples 14700 joint_samples 44 [150888, 1046739] processed_samples 14700 unjoint_samples 14700 joint_samples 44 [1047727, 712623] processed_samples 14700 unjoint_samples 14700 joint_samples 44 [1047727, 712623] [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 
0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d49fd540] mmco: unref short failure [h264 @ 0x55e9d49fd540] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d446c1c0] mmco: unref short failure [h264 @ 0x55f3a676fac0] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9c65f7380] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure processed_samples 14800 unjoint_samples 14800 joint_samples 43 [869907, 1047358] processed_samples 14800 unjoint_samples 14800 joint_samples 43 [869907, 1047358] processed_samples 14800 unjoint_samples 14800 joint_samples 45 [1046649, 444092] processed_samples 14800 unjoint_samples 14800 joint_samples 44 [1046225, 489810] processed_samples 14800 unjoint_samples 14800 joint_samples 45 [1046649, 444092] processed_samples 14800 unjoint_samples 14800 joint_samples 44 [1046225, 489810] processed_samples 14800 unjoint_samples 14800 joint_samples 44 [1036073, 628244] processed_samples 14800 unjoint_samples 14800 joint_samples 44 [1036073, 628244] [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 
0x55f3aaf5efc0] mmco: unref short failure processed_samples 14800 unjoint_samples 14800 joint_samples 44 [1021803, 1047813] processed_samples 14800 unjoint_samples 14800 joint_samples 44 [1021803, 1047813] [h264 @ 0x55e9d4d3e940] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure processed_samples 14800 unjoint_samples 14800 joint_samples 45 [1046814, 505877] processed_samples 14800 unjoint_samples 14800 joint_samples 45 [1046814, 505877] processed_samples 14802 unjoint_samples 14800 joint_samples 44 [515767, 1046739] processed_samples 14802 unjoint_samples 14800 joint_samples 44 [515767, 1046739] processed_samples 14800 unjoint_samples 14800 joint_samples 44 [1047727, 1006730] processed_samples 14800 unjoint_samples 14800 joint_samples 44 [1047727, 1006730] [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [mov,mp4,m4a,3gp,3g2,mj2 @ 0x55f3a83b7240] stream 1, offset 0x1400056: partial file [mov,mp4,m4a,3gp,3g2,mj2 @ 0x55e9d06af5c0] stream 1, offset 0x1400056: 
partial file [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d3bc39c0] mmco: unref short failure [h264 @ 0x55e9d3bc39c0] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55e9d494d1c0] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d09bb780] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure processed_samples 14900 unjoint_samples 14900 joint_samples 44 [178210, 1047358] processed_samples 14900 unjoint_samples 14900 joint_samples 44 [178210, 1047358] processed_samples 14900 unjoint_samples 14900 joint_samples 44 [1046225, 795798] processed_samples 14900 unjoint_samples 14900 joint_samples 44 [1046225, 795798] processed_samples 14900 unjoint_samples 14900 joint_samples 45 [1031061, 523880] processed_samples 14900 unjoint_samples 14900 joint_samples 45 [1031061, 523880] processed_samples 14900 unjoint_samples 14900 joint_samples 45 [1047770, 201899] processed_samples 14900 unjoint_samples 14900 joint_samples 45 [1047770, 201899] processed_samples 14900 unjoint_samples 14900 joint_samples 45 [1046814, 820056] processed_samples 14900 unjoint_samples 14900 joint_samples 45 [1046814, 820056] processed_samples 14900 unjoint_samples 14900 joint_samples 44 [1036073, 910374] processed_samples 14901 unjoint_samples 14900 joint_samples 45 [1046649, 798767] processed_samples 14901 unjoint_samples 14900 joint_samples 45 [1046649, 798767] processed_samples 14900 unjoint_samples 14900 joint_samples 44 [1036073, 910374] [h264 @ 0x55e9d08ab5c0] mmco: unref short failure [h264 @ 0x55e9d08ab5c0] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 
0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure processed_samples 14902 unjoint_samples 14900 joint_samples 44 [782988, 1046739] processed_samples 14902 unjoint_samples 14900 joint_samples 44 [782988, 1046739] [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d2523300] mmco: unref short failure 
[h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure processed_samples 15000 unjoint_samples 15000 joint_samples 45 [135153, 1046636] processed_samples 15000 unjoint_samples 15000 joint_samples 45 [135153, 1046636] processed_samples 15000 unjoint_samples 15000 joint_samples 46 [202771, 1042222] [h264 @ 0x55e9d398e240] mmco: unref short failure processed_samples 15000 unjoint_samples 15000 joint_samples 46 [202771, 1042222] [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure processed_samples 15000 unjoint_samples 15000 joint_samples 45 [1047770, 488903] processed_samples 15000 unjoint_samples 15000 joint_samples 45 [1047770, 488903] processed_samples 15000 unjoint_samples 15000 joint_samples 45 [1031061, 812334] processed_samples 15000 unjoint_samples 15000 joint_samples 45 [1031061, 812334] [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure processed_samples 15000 unjoint_samples 15000 joint_samples 44 [1046225, 1046455] processed_samples 15000 unjoint_samples 15000 joint_samples 44 [1046225, 1046455] processed_samples 15000 unjoint_samples 15000 joint_samples 44 [494650, 1047358] processed_samples 15002 unjoint_samples 15000 joint_samples 44 [1021168, 1046739] processed_samples 15000 unjoint_samples 15000 joint_samples 44 [494650, 1047358] processed_samples 15002 unjoint_samples 15000 joint_samples 44 [1021168, 1046739] [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure processed_samples 15001 unjoint_samples 15000 joint_samples 46 [95706, 1026615] [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure processed_samples 15001 unjoint_samples 15000 joint_samples 46 [95706, 1026615] [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55e9d09bb780] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 
@ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3aa201540] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d0b25200] mmco: unref short failure [h264 @ 0x55e9d0b25200] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d2523300] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d3944980] mmco: unref short failure processed_samples 15100 unjoint_samples 15100 joint_samples 45 [1046225, 207374] processed_samples 15100 unjoint_samples 15100 joint_samples 45 [1046225, 207374] [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure processed_samples 15100 unjoint_samples 15100 joint_samples 45 [437036, 1046636] processed_samples 15100 unjoint_samples 15100 joint_samples 45 [437036, 1046636] processed_samples 15100 unjoint_samples 15100 joint_samples 46 [473855, 1042222] 
processed_samples 15100 unjoint_samples 15100 joint_samples 46 [473855, 1042222] [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure processed_samples 15100 unjoint_samples 15100 joint_samples 46 [1043572, 143226] processed_samples 15100 unjoint_samples 15100 joint_samples 46 [1043572, 143226] processed_samples 15100 unjoint_samples 15100 joint_samples 45 [1047770, 729896] processed_samples 15100 unjoint_samples 15100 joint_samples 44 [814327, 1047358] [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure processed_samples 15100 unjoint_samples 15100 joint_samples 45 [1047770, 729896] processed_samples 15100 unjoint_samples 15100 joint_samples 44 [814327, 1047358] [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure processed_samples 15101 unjoint_samples 15100 joint_samples 46 [341718, 1026615] processed_samples 15101 unjoint_samples 15100 joint_samples 46 [341718, 1026615] [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure processed_samples 15102 unjoint_samples 15100 joint_samples 45 [277527, 1046739] processed_samples 15102 unjoint_samples 15100 joint_samples 45 [277527, 1046739] [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d58dfd00] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 
0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure processed_samples 15200 unjoint_samples 15200 joint_samples 46 [728005, 1042222] processed_samples 15200 unjoint_samples 15200 joint_samples 45 [1046225, 537097] processed_samples 15200 unjoint_samples 15200 joint_samples 46 [728005, 1042222] processed_samples 15200 unjoint_samples 15200 joint_samples 45 [1046225, 537097] processed_samples 15200 unjoint_samples 15200 joint_samples 45 [1040687, 160027] processed_samples 15200 unjoint_samples 15200 joint_samples 45 [1040687, 160027] processed_samples 15200 unjoint_samples 15200 joint_samples 46 [61639, 1047565] processed_samples 15200 unjoint_samples 15200 joint_samples 46 [61639, 1047565] processed_samples 15200 unjoint_samples 15200 joint_samples 45 [748316, 1046636] processed_samples 15200 unjoint_samples 15200 joint_samples 46 [1043572, 464591] processed_samples 15200 unjoint_samples 15200 joint_samples 46 [1043572, 464591] processed_samples 15200 unjoint_samples 15200 joint_samples 45 [748316, 1046636] processed_samples 15202 unjoint_samples 15200 joint_samples 45 [568464, 1046739] processed_samples 15202 unjoint_samples 15200 joint_samples 45 [568464, 1046739] processed_samples 15201 unjoint_samples 15200 joint_samples 46 [696980, 1026615] processed_samples 15201 unjoint_samples 15200 joint_samples 46 [696980, 1026615] [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9c65f7380] mmco: unref short failure [h264 @ 0x55e9c65f7380] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d2523300] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d2cb1700] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 
0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55f3aa6b83c0] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55e9d33f1a00] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55e9d4f815c0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [mov,mp4,m4a,3gp,3g2,mj2 @ 0x55f3aaeac100] stream 1, offset 0x1400056: partial file [mov,mp4,m4a,3gp,3g2,mj2 @ 0x55e9d4603840] stream 1, offset 0x1400056: partial file [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3aaf595c0] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short 
failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a8036f00] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3a6fcbec0] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure processed_samples 15300 unjoint_samples 15300 joint_samples 46 [101705, 1046636] processed_samples 15300 unjoint_samples 15300 joint_samples 46 [101705, 1046636] [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure processed_samples 15300 unjoint_samples 15300 joint_samples 45 [1040687, 448479] processed_samples 15300 unjoint_samples 15300 joint_samples 45 [1040687, 448479] processed_samples 15300 unjoint_samples 15300 joint_samples 45 [1046225, 909153] processed_samples 15300 unjoint_samples 15300 joint_samples 45 [1046225, 909153] processed_samples 15300 unjoint_samples 15300 joint_samples 46 [332751, 1047565] processed_samples 15300 unjoint_samples 15300 joint_samples 46 [332751, 1047565] processed_samples 15300 unjoint_samples 15300 joint_samples 46 [1043572, 772286] processed_samples 15300 unjoint_samples 15300 joint_samples 46 [1043572, 772286] processed_samples 15301 unjoint_samples 15300 joint_samples 46 [959771, 1026615] processed_samples 15301 unjoint_samples 15300 joint_samples 46 [959771, 1026615] processed_samples 15301 unjoint_samples 15300 joint_samples 46 [1014050, 1042222] processed_samples 15301 unjoint_samples 15300 joint_samples 46 [1014050, 1042222] [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure processed_samples 15302 unjoint_samples 15300 joint_samples 45 [936738, 1046739] processed_samples 15302 unjoint_samples 15300 joint_samples 45 [936738, 1046739] [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3aa201540] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55e9d9362440] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short 
failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55f3a74d5a80] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure processed_samples 15400 unjoint_samples 15400 joint_samples 47 [28282, 1034854] processed_samples 15400 unjoint_samples 15400 joint_samples 47 [28282, 1034854] processed_samples 15400 unjoint_samples 15400 joint_samples 46 [395642, 1046636] processed_samples 15400 unjoint_samples 15400 joint_samples 46 [395642, 1046636] processed_samples 15400 unjoint_samples 15400 joint_samples 46 [157179, 
1048316] processed_samples 15400 unjoint_samples 15400 joint_samples 46 [157179, 1048316] processed_samples 15400 unjoint_samples 15400 joint_samples 45 [1040687, 767436] [h264 @ 0x55e9d1240980] mmco: unref short failure processed_samples 15400 unjoint_samples 15400 joint_samples 45 [1040687, 767436] [h264 @ 0x55f3a667fe80] mmco: unref short failure processed_samples 15401 unjoint_samples 15400 joint_samples 47 [168631, 1048291] processed_samples 15401 unjoint_samples 15400 joint_samples 47 [168631, 1048291] processed_samples 15401 unjoint_samples 15400 joint_samples 47 [1046828, 241061] processed_samples 15401 unjoint_samples 15400 joint_samples 47 [1046828, 241061] processed_samples 15400 unjoint_samples 15400 joint_samples 46 [665835, 1047565] processed_samples 15400 unjoint_samples 15400 joint_samples 46 [665835, 1047565] processed_samples 15402 unjoint_samples 15400 joint_samples 46 [1038063, 278032] processed_samples 15402 unjoint_samples 15400 joint_samples 46 [1038063, 278032] [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure [h264 @ 0x55e9d4d3e940] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55f3a6a52140] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure 
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
processed_samples 15500 unjoint_samples 15500 joint_samples 46 [1044203, 77813]
processed_samples 15500 unjoint_samples 15500 joint_samples 46 [513048, 1048316]
processed_samples 15500 unjoint_samples 15500 joint_samples 47 [452316, 1034854]
processed_samples 15501 unjoint_samples 15500 joint_samples 47 [409843, 1048291]
processed_samples 15500 unjoint_samples 15500 joint_samples 46 [644706, 1046636]
processed_samples 15500 unjoint_samples 15500 joint_samples 46 [933499, 1047565]
processed_samples 15502 unjoint_samples 15500 joint_samples 46 [1038063, 569484]
processed_samples 15501 unjoint_samples 15500 joint_samples 47 [1046828, 476780]
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
processed_samples 15600 unjoint_samples 15600 joint_samples 46 [1044203, 386054]
processed_samples 15600 unjoint_samples 15600 joint_samples 47 [1043118, 417114]
processed_samples 15600 unjoint_samples 15600 joint_samples 46 [712683, 1048316]
processed_samples 15600 unjoint_samples 15600 joint_samples 47 [706868, 1034854]
processed_samples 15601 unjoint_samples 15600 joint_samples 47 [1046828, 822763]
processed_samples 15601 unjoint_samples 15600 joint_samples 47 [739804, 1048291]
processed_samples 15600 unjoint_samples 15600 joint_samples 46 [946254, 1046636]
processed_samples 15602 unjoint_samples 15600 joint_samples 46 [1038063, 913309]
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
/root/miniconda3/envs/py38/lib/python3.8/site-packages/PIL/TiffImagePlugin.py:870: UserWarning: Corrupt EXIF data. Expecting to read 4 bytes but only got 0.
  warnings.warn(str(msg))
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
processed_samples 15700 unjoint_samples 15700 joint_samples 47 [228089, 1046636]
processed_samples 15701 unjoint_samples 15700 joint_samples 48 [1047566, 14759]
processed_samples 15700 unjoint_samples 15700 joint_samples 48 [1044982, 17713]
processed_samples 15701 unjoint_samples 15700 joint_samples 48 [244531, 1031554]
processed_samples 15700 unjoint_samples 15700 joint_samples 47 [1043118, 851775]
processed_samples 15700 unjoint_samples 15700 joint_samples 46 [1044203, 639975]
processed_samples 15700 unjoint_samples 15700 joint_samples 46 [937639, 1048316]
processed_samples 15702 unjoint_samples 15700 joint_samples 47 [1046926, 137721]
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
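The PIL message above is a warning, not an error: TiffImagePlugin hit truncated EXIF metadata in a training image, and the sample itself still loads. If such warnings need to be kept out of per-node logs, a filter along the lines of the sketch below could be installed before the dataloader workers start decoding; this is an illustration, not something this training script is known to do.

```python
# Hedged sketch: suppress only PIL's "Corrupt EXIF data" UserWarning so it does not
# flood worker logs. Must run in each process before the first image is decoded.
import warnings

warnings.filterwarnings(
    "ignore",
    message="Corrupt EXIF data",  # matched against the start of the warning text
    category=UserWarning,
)
```

Filtering on both the message prefix and the category keeps unrelated UserWarnings visible.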
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
processed_samples 15800 unjoint_samples 15800 joint_samples 48 [1047493, 255054]
processed_samples 15800 unjoint_samples 15800 joint_samples 48 [1044982, 282610]
processed_samples 15800 unjoint_samples 15800 joint_samples 47 [1047388, 233707]
processed_samples 15801 unjoint_samples 15800 joint_samples 48 [1047566, 360381]
processed_samples 15801 unjoint_samples 15800 joint_samples 48 [548291, 1031554]
processed_samples 15800 unjoint_samples 15800 joint_samples 46 [1044203, 970669]
processed_samples 15800 unjoint_samples 15800 joint_samples 47 [517153, 1046636]
processed_samples 15802 unjoint_samples 15800 joint_samples 47 [1046926, 556537]
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
processed_samples 15900 unjoint_samples 15900 joint_samples 47 [1046751, 370829]
processed_samples 15900 unjoint_samples 15900 joint_samples 47 [1047388, 617128]
processed_samples 15900 unjoint_samples 15900 joint_samples 48 [1044982, 601705]
processed_samples 15900 unjoint_samples 15900 joint_samples 47 [887561, 1046636]
processed_samples 15900 unjoint_samples 15900 joint_samples 48 [1047493, 564745]
processed_samples 15901 unjoint_samples 15900 joint_samples 48 [1047566, 648942]
processed_samples 15902 unjoint_samples 15900 joint_samples 47 [1046926, 787017]
processed_samples 15901 unjoint_samples 15900 joint_samples 48 [1018895, 1031554]
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
processed_samples 16000 unjoint_samples 16000 joint_samples 47 [1046751, 758428]
processed_samples 16000 unjoint_samples 16000 joint_samples 48 [1045818, 217590]
processed_samples 16000 unjoint_samples 16000 joint_samples 47 [1047388, 905712]
processed_samples 16001 unjoint_samples 16000 joint_samples 49 [1046936, 229417]
processed_samples 16000 unjoint_samples 16000 joint_samples 48 [1047493, 816218]
processed_samples 16001 unjoint_samples 16000 joint_samples 48 [1047566, 913646]
processed_samples 16002 unjoint_samples 16000 joint_samples 48 [82308, 1038556]
processed_samples 16000 unjoint_samples 16000 joint_samples 48 [1044982, 982089]
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
processed_samples 16100 unjoint_samples 16100 joint_samples 48 [1046751, 6861]
processed_samples 16100 unjoint_samples 16100 joint_samples 49 [1044982, 378784]
processed_samples 16100 unjoint_samples 16100 joint_samples 49 [42140, 1046672]
processed_samples 16100 unjoint_samples 16100 joint_samples 48 [135669, 1047190]
processed_samples 16101 unjoint_samples 16100 joint_samples 49 [246540, 1033598]
processed_samples 16100 unjoint_samples 16100 joint_samples 48 [1045818, 541551]
processed_samples 16101 unjoint_samples 16100 joint_samples 49 [1046936, 534053]
processed_samples 16102 unjoint_samples 16100 joint_samples 48 [442957, 1038556]
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
processed_samples 16200 unjoint_samples 16200 joint_samples 48 [1046751, 430024]
processed_samples 16200 unjoint_samples 16200 joint_samples 48 [447555, 1047190]
processed_samples 16200 unjoint_samples 16200 joint_samples 48 [1045818, 909847]
processed_samples 16200 unjoint_samples 16200 joint_samples 49 [487211, 1046672]
processed_samples 16201 unjoint_samples 16200 joint_samples 49 [499583, 1033598]
processed_samples 16200 unjoint_samples 16200 joint_samples 49 [1044982, 733075]
processed_samples 16202 unjoint_samples 16200 joint_samples 48 [673019, 1038556]
processed_samples 16201 unjoint_samples 16200 joint_samples 49 [1046936, 903959]
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
processed_samples 16300 unjoint_samples 16300 joint_samples 49 [260251, 1045336]
processed_samples 16300 unjoint_samples 16300 joint_samples 48 [663286, 1047190]
processed_samples 16300 unjoint_samples 16300 joint_samples 48 [1046751, 761613]
processed_samples 16301 unjoint_samples 16300 joint_samples 50 [203822, 1013870]
processed_samples 16300 unjoint_samples 16300 joint_samples 49 [700209, 1046672]
processed_samples 16300 unjoint_samples 16300 joint_samples 49 [1044982, 1009978]
processed_samples 16301 unjoint_samples 16300 joint_samples 49 [964907, 1033598]
processed_samples 16302 unjoint_samples 16300 joint_samples 48 [1027169, 1038556]
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
processed_samples 16400 unjoint_samples 16400 joint_samples 50 [1046441, 121299]
processed_samples 16400 unjoint_samples 16400 joint_samples 49 [1046797, 223654]
processed_samples 16400 unjoint_samples 16400 joint_samples 49 [510142, 1045336]
processed_samples 16400 unjoint_samples 16400 joint_samples 50 [222064, 1046266]
processed_samples 16401 unjoint_samples 16400 joint_samples 50 [515892, 1013870]
processed_samples 16401 unjoint_samples 16400 joint_samples 50 [1042633, 275425]
processed_samples 16400 unjoint_samples 16400 joint_samples 48 [1002166, 1047190]
processed_samples 16402 unjoint_samples 16400 joint_samples 49 [257229, 1046364]
[h264 @ ...] mmco: unref short failure (repeated across multiple decoder instances)
short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d48c5b80] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55e9d5827080] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure processed_samples 16500 unjoint_samples 16500 joint_samples 49 [1046797, 549845] processed_samples 16500 unjoint_samples 16500 joint_samples 49 [1046797, 549845] processed_samples 16500 unjoint_samples 16500 joint_samples 49 [772487, 1045336] processed_samples 16500 unjoint_samples 16500 joint_samples 49 [1034958, 306329] processed_samples 16500 unjoint_samples 16500 joint_samples 49 [1034958, 306329] processed_samples 16500 unjoint_samples 16500 joint_samples 49 [772487, 1045336] processed_samples 16500 unjoint_samples 16500 joint_samples 50 [1046441, 528036] processed_samples 16500 unjoint_samples 16500 joint_samples 50 [1046441, 528036] processed_samples 16501 unjoint_samples 16500 joint_samples 50 [1042633, 651886] processed_samples 16501 unjoint_samples 16500 joint_samples 50 [1042633, 651886] processed_samples 16500 unjoint_samples 16500 joint_samples 50 [472373, 1046266] processed_samples 16500 unjoint_samples 16500 joint_samples 50 [472373, 1046266] processed_samples 16501 unjoint_samples 16500 joint_samples 50 [796122, 1013870] processed_samples 16501 unjoint_samples 16500 joint_samples 50 [796122, 1013870] processed_samples 16502 unjoint_samples 16500 joint_samples 49 [699160, 1046364] processed_samples 16502 unjoint_samples 16500 joint_samples 49 [699160, 1046364] [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d2e11b80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a8d8c080] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55e9d3caab40] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short 
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed]
processed_samples 16600 unjoint_samples 16600 joint_samples 50 [1047071, 212077]
processed_samples 16600 unjoint_samples 16600 joint_samples 49 [1034958, 596533]
processed_samples 16601 unjoint_samples 16600 joint_samples 51 [73945, 1032022]
processed_samples 16600 unjoint_samples 16600 joint_samples 50 [781446, 1046266]
processed_samples 16601 unjoint_samples 16600 joint_samples 50 [1042633, 914414]
processed_samples 16600 unjoint_samples 16600 joint_samples 49 [1046797, 812359]
processed_samples 16600 unjoint_samples 16600 joint_samples 50 [1046441, 778007]
processed_samples 16602 unjoint_samples 16600 joint_samples 49 [968462, 1046364]
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed]
processed_samples 16700 unjoint_samples 16700 joint_samples 50 [68685, 1044537]
processed_samples 16700 unjoint_samples 16700 joint_samples 51 [146974, 1047027]
processed_samples 16700 unjoint_samples 16700 joint_samples 51 [1038897, 68355]
processed_samples 16701 unjoint_samples 16700 joint_samples 51 [217228, 1047430]
processed_samples 16700 unjoint_samples 16700 joint_samples 50 [1047071, 569453]
processed_samples 16701 unjoint_samples 16700 joint_samples 51 [402454, 1032022]
processed_samples 16700 unjoint_samples 16700 joint_samples 49 [1034958, 943750]
processed_samples 16702 unjoint_samples 16700 joint_samples 50 [224787, 1046364]
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed; two decoder contexts also reported "illegal short term buffer state detected"]
processed_samples 16800 unjoint_samples 16800 joint_samples 50 [499311, 1044537]
processed_samples 16800 unjoint_samples 16800 joint_samples 50 [1047190, 114913]
processed_samples 16800 unjoint_samples 16800 joint_samples 51 [433090, 1047027]
processed_samples 16800 unjoint_samples 16800 joint_samples 51 [1038897, 361057]
processed_samples 16801 unjoint_samples 16800 joint_samples 51 [592019, 1047430]
processed_samples 16801 unjoint_samples 16800 joint_samples 51 [680565, 1032022]
processed_samples 16800 unjoint_samples 16800 joint_samples 50 [1047071, 877866]
processed_samples 16802 unjoint_samples 16800 joint_samples 50 [522879, 1046364]
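The "illegal short term buffer state detected" messages are a related libavcodec H.264 reference-picture error reported by the same family of decoder contexts. If the dataloader decodes video through PyAV (an assumption here; the log only shows libavcodec output, not which binding produced it), the libav log level can be lowered once per worker so these per-frame warnings stop flooding the training log without changing what gets decoded. A minimal sketch under that assumption:

import av
import av.logging

# Assumption: videos are opened with PyAV. Raising the threshold to ERROR hides
# per-frame decoder warnings such as "mmco: unref short failure".
av.logging.set_level(av.logging.ERROR)

def first_frame(path):
    """Illustrative helper (hypothetical): decode and return the first video frame."""
    with av.open(path) as container:
        for frame in container.decode(video=0):
            return frame
    return None

If decoding goes through the ffmpeg CLI instead, the equivalent knob is its -loglevel option (for example, -loglevel error).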
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed]
processed_samples 16900 unjoint_samples 16900 joint_samples 51 [1038897, 648629]
processed_samples 16900 unjoint_samples 16900 joint_samples 50 [1047190, 414147]
processed_samples 16900 unjoint_samples 16900 joint_samples 50 [738590, 1044537]
processed_samples 16900 unjoint_samples 16900 joint_samples 51 [1047071, 58387]
processed_samples 16901 unjoint_samples 16900 joint_samples 52 [1046590, 79110]
processed_samples 16900 unjoint_samples 16900 joint_samples 51 [819573, 1047027]
processed_samples 16901 unjoint_samples 16900 joint_samples 51 [988765, 1032022]
processed_samples 16902 unjoint_samples 16900 joint_samples 50 [854002, 1046364]
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed]
processed_samples 17000 unjoint_samples 17000 joint_samples 51 [14195, 1046876]
processed_samples 17000 unjoint_samples 17000 joint_samples 52 [1046562, 20156]
processed_samples 17000 unjoint_samples 17000 joint_samples 51 [1047071, 325818]
processed_samples 17000 unjoint_samples 17000 joint_samples 51 [1038897, 919753]
processed_samples 17001 unjoint_samples 17000 joint_samples 52 [345249, 1047305]
processed_samples 17000 unjoint_samples 17000 joint_samples 50 [1047190, 755701]
processed_samples 17001 unjoint_samples 17000 joint_samples 52 [1046590, 526218]
processed_samples 17002 unjoint_samples 17000 joint_samples 51 [1041620, 155299]
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed]
processed_samples 17100 unjoint_samples 17100 joint_samples 51 [51244, 1044498]
processed_samples 17100 unjoint_samples 17100 joint_samples 51 [291157, 1046876]
processed_samples 17100 unjoint_samples 17100 joint_samples 52 [1041190, 167774]
processed_samples 17100 unjoint_samples 17100 joint_samples 52 [1046562, 349521]
processed_samples 17100 unjoint_samples 17100 joint_samples 51 [1047071, 566748]
processed_samples 17101 unjoint_samples 17100 joint_samples 52 [658564, 1047305]
processed_samples 17101 unjoint_samples 17100 joint_samples 52 [1046590, 947408]
processed_samples 17102 unjoint_samples 17100 joint_samples 51 [1041620, 437902]
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed]
processed_samples 17200 unjoint_samples 17200 joint_samples 51 [687816, 1046876]
processed_samples 17200 unjoint_samples 17200 joint_samples 51 [358397, 1044498]
processed_samples 17200 unjoint_samples 17200 joint_samples 51 [1047071, 839468]
processed_samples 17201 unjoint_samples 17200 joint_samples 53 [1046590, 326433]
processed_samples 17200 unjoint_samples 17200 joint_samples 52 [1041190, 435171]
processed_samples 17200 unjoint_samples 17200 joint_samples 52 [1046562, 657998]
processed_samples 17202 unjoint_samples 17200 joint_samples 51 [1041620, 722182]
processed_samples 17201 unjoint_samples 17200 joint_samples 52 [1009466, 1047305]
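Each "processed_samples … joint_samples …" record is printed twice in the raw stream and the lines are wrapped at arbitrary points, which makes the counters hard to follow by eye. A small parsing sketch is shown below; the log filename is a placeholder and the tuple layout simply mirrors the visible record format (two sample counters, a joint_samples index, and a bracketed pair of values).

import re

LOG_PATH = "log_node19.txt"  # placeholder: a saved copy of this node's log

# Mirrors the visible record format, tolerating line wraps between tokens.
REC = re.compile(
    r"processed_samples\s+(\d+)\s+unjoint_samples\s+(\d+)\s+"
    r"joint_samples\s+(\d+)\s+\[(\d+),\s*(\d+)\]"
)

with open(LOG_PATH, errors="replace") as fh:
    text = fh.read()

seen, records = set(), []
for m in REC.finditer(text):
    rec = tuple(int(g) for g in m.groups())
    if rec not in seen:      # each record is emitted twice; keep one copy
        seen.add(rec)
        records.append(rec)

# Show the most recent checkpoint's records (eight data-parallel writers here).
for processed, unjoint, joint, first, second in records[-8:]:
    print(processed, unjoint, joint, first, second)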
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed]
processed_samples 17300 unjoint_samples 17300 joint_samples 52 [39693, 1038942]
processed_samples 17300 unjoint_samples 17300 joint_samples 51 [741185, 1044498]
processed_samples 17300 unjoint_samples 17300 joint_samples 51 [1000925, 1046876]
processed_samples 17301 unjoint_samples 17300 joint_samples 53 [1046590, 649975]
processed_samples 17300 unjoint_samples 17300 joint_samples 52 [1041190, 657337]
processed_samples 17301 unjoint_samples 17300 joint_samples 53 [1046327, 221934]
processed_samples 17300 unjoint_samples 17300 joint_samples 52 [1046562, 1029281]
processed_samples 17302 unjoint_samples 17300 joint_samples 51 [1041620, 1027393]
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed]
processed_samples 17400 unjoint_samples 17400 joint_samples 52 [345221, 1038942]
processed_samples 17400 unjoint_samples 17400 joint_samples 52 [61254, 1047348]
processed_samples 17400 unjoint_samples 17400 joint_samples 52 [1024745, 307122]
processed_samples 17400 unjoint_samples 17400 joint_samples 53 [1046562, 334459]
processed_samples 17401 unjoint_samples 17400 joint_samples 53 [1046327, 525349]
processed_samples 17401 unjoint_samples 17400 joint_samples 53 [1046590, 895415]
processed_samples 17400 unjoint_samples 17400 joint_samples 52 [1041190, 941489]
processed_samples 17402 unjoint_samples 17400 joint_samples 52 [309054, 1032229]
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed]
processed_samples 17500 unjoint_samples 17500 joint_samples 52 [738951, 1038942]
processed_samples 17500 unjoint_samples 17500 joint_samples 52 [293238, 1047348]
processed_samples 17500 unjoint_samples 17500 joint_samples 52 [1024745, 676977]
processed_samples 17500 unjoint_samples 17500 joint_samples 53 [1047087, 319650]
processed_samples 17501 unjoint_samples 17500 joint_samples 54 [195241, 1030809]
processed_samples 17500 unjoint_samples 17500 joint_samples 53 [1046562, 558676]
processed_samples 17502 unjoint_samples 17500 joint_samples 52 [539769, 1032229]
processed_samples 17501 unjoint_samples 17500 joint_samples 53 [1046327, 817598]
[libavcodec h264: repeated "mmco: unref short failure" warnings collapsed]
processed_samples 17600 unjoint_samples 17600 joint_samples 53 [1047087, 676094]
processed_samples 17600 unjoint_samples 17600 joint_samples 52 [652212, 1047348]
processed_samples 17600 unjoint_samples 17600 joint_samples 52 [1029348, 1037201]
processed_samples 17601 unjoint_samples 17600 joint_samples 54 [176718, 1046388]
processed_samples 17600 unjoint_samples 17600 joint_samples 53 [1046562, 909956]
processed_samples 17601 unjoint_samples 17600 joint_samples 54 [524230, 1030809]
processed_samples 17600 unjoint_samples 17600 joint_samples 52 [1022362, 1038942]
processed_samples 17602 unjoint_samples 17600 joint_samples 52 [826275, 1032229]
short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55f3ab73be40] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55e9d12ebf00] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure processed_samples 17700 unjoint_samples 17700 joint_samples 53 [271826, 1045620] processed_samples 17700 unjoint_samples 17700 joint_samples 53 [271826, 1045620] processed_samples 17700 unjoint_samples 17700 joint_samples 52 [965395, 1047348] processed_samples 17700 unjoint_samples 17700 joint_samples 52 [965395, 1047348] processed_samples 17700 unjoint_samples 17700 joint_samples 53 [1045563, 276075] processed_samples 17700 unjoint_samples 17700 joint_samples 53 [1045563, 276075] processed_samples 17700 unjoint_samples 17700 joint_samples 54 [368340, 1036854] processed_samples 17700 unjoint_samples 17700 joint_samples 54 [368340, 1036854] processed_samples 17702 unjoint_samples 17700 joint_samples 53 [19762, 1046745] processed_samples 17702 unjoint_samples 17700 joint_samples 53 [19762, 1046745] processed_samples 17701 unjoint_samples 17700 joint_samples 54 [509777, 1046388] processed_samples 17701 unjoint_samples 17700 joint_samples 54 [509777, 1046388] processed_samples 17700 unjoint_samples 17700 joint_samples 53 [1047087, 1043814] processed_samples 17700 unjoint_samples 17700 joint_samples 53 [1047087, 1043814] processed_samples 17701 unjoint_samples 17700 joint_samples 54 [787041, 1030809] processed_samples 17701 unjoint_samples 17700 joint_samples 54 [787041, 1030809] [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55e9d3e8fa00] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55f3aa3b8a00] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short 
failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55f3a774d600] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55f3aa6334c0] mmco: unref short failure [h264 @ 0x55e9d2636880] mmco: unref short failure [h264 @ 0x55e9d2636880] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d34e9e40] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d4fa9080] mmco: unref short failure [h264 @ 0x55e9d4fa9080] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55f3aa86b400] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short 
failure [h264 @ 0x55e9d2523300] mmco: unref short failure processed_samples 17800 unjoint_samples 17800 joint_samples 53 [1042104, 190591] processed_samples 17800 unjoint_samples 17800 joint_samples 53 [1042104, 190591] processed_samples 17800 unjoint_samples 17800 joint_samples 53 [601565, 1045620] processed_samples 17800 unjoint_samples 17800 joint_samples 53 [601565, 1045620] [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55f3a813ccc0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure processed_samples 17800 unjoint_samples 17800 joint_samples 53 [1045563, 621621] processed_samples 17800 unjoint_samples 17800 joint_samples 53 [1045563, 621621] processed_samples 17800 unjoint_samples 17800 joint_samples 54 [1047087, 277023] processed_samples 17800 unjoint_samples 17800 joint_samples 54 [1047087, 277023] processed_samples 17801 unjoint_samples 17800 joint_samples 55 [1045564, 47013] processed_samples 17801 unjoint_samples 17800 joint_samples 55 [1045564, 47013] processed_samples 17800 unjoint_samples 17800 joint_samples 54 [698592, 1036854] processed_samples 17800 unjoint_samples 17800 joint_samples 54 [698592, 1036854] processed_samples 17802 unjoint_samples 17800 joint_samples 53 [378965, 1046745] processed_samples 17802 unjoint_samples 17800 joint_samples 53 [378965, 1046745] processed_samples 17801 unjoint_samples 17800 joint_samples 54 [775564, 1046388] processed_samples 17801 unjoint_samples 17800 joint_samples 54 [775564, 1046388] [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3aa4bd480] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55f3aae2b440] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d2db4680] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55e9d4486ac0] mmco: unref short failure 
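Note: the repeated "[h264 @ 0x...] mmco: unref short failure" messages above are emitted by FFmpeg's H.264 decoder while the dataloader decodes video clips; they indicate a reference-picture bookkeeping hiccup in the bitstream, not a training error. If it becomes useful to quantify this noise, a minimal sketch along the following lines could summarize it from a saved copy of this log (the file name below is hypothetical):

import re
from collections import Counter

LOG_PATH = "log_node19.txt"  # hypothetical local copy of this log file

# Count "mmco: unref short failure" warnings per h264 decoder context address.
pattern = re.compile(r"\[h264 @ (0x[0-9a-f]+)\] mmco: unref short failure")
counts = Counter()
with open(LOG_PATH, errors="replace") as fh:
    for line in fh:
        counts.update(pattern.findall(line))

print("total h264 'mmco: unref short failure' warnings:", sum(counts.values()))
for ctx, n in counts.most_common(5):
    print(f"  {ctx}: {n}")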
[h264 @ 0x55f3aa9f1e00] mmco: unref short failure  (decoder warning repeated; repetitions and duplicate lines omitted)
processed_samples 17900 unjoint_samples 17900 joint_samples 53 [1042104, 509352]
processed_samples 17901 unjoint_samples 17900 joint_samples 55 [1045564, 489781]
processed_samples 17900 unjoint_samples 17900 joint_samples 54 [1047087, 526508]
processed_samples 17900 unjoint_samples 17900 joint_samples 53 [978508, 1045620]
processed_samples 17900 unjoint_samples 17900 joint_samples 54 [1001076, 1036854]
processed_samples 17901 unjoint_samples 17900 joint_samples 55 [1046052, 33191]
processed_samples 17900 unjoint_samples 17900 joint_samples 53 [1045563, 887825]
processed_samples 17902 unjoint_samples 17900 joint_samples 53 [685418, 1046745]
processed_samples 18000 unjoint_samples 18000 joint_samples 54 [1036533, 222254]
processed_samples 18000 unjoint_samples 18000 joint_samples 54 [203770, 1020328]
processed_samples 18000 unjoint_samples 18000 joint_samples 53 [1042104, 822606]
processed_samples 18000 unjoint_samples 18000 joint_samples 54 [1047087, 808980]
processed_samples 18000 unjoint_samples 18000 joint_samples 55 [325736, 1038236]
processed_samples 18001 unjoint_samples 18000 joint_samples 55 [1046052, 358300]
processed_samples 18001 unjoint_samples 18000 joint_samples 55 [1045564, 763059]
processed_samples 18002 unjoint_samples 18000 joint_samples 53 [953612, 1046745]
[h264 @ 0x55e9d11e4680] mmco: unref short failure  (decoder warning repeated; repetitions and duplicate lines omitted)
processed_samples 18100 unjoint_samples 18100 joint_samples 54 [61446, 1030856]
processed_samples 18100 unjoint_samples 18100 joint_samples 55 [33131, 1044373]
processed_samples 18101 unjoint_samples 18100 joint_samples 56 [28612, 1047838]
processed_samples 18100 unjoint_samples 18100 joint_samples 54 [521587, 1020328]
processed_samples 18100 unjoint_samples 18100 joint_samples 55 [769629, 1038236]
processed_samples 18100 unjoint_samples 18100 joint_samples 54 [1036533, 486374]
processed_samples 18101 unjoint_samples 18100 joint_samples 55 [1046052, 689495]
processed_samples 18102 unjoint_samples 18100 joint_samples 54 [128658, 1046745]
processed_samples 18200 unjoint_samples 18200 joint_samples 55 [1040245, 1038398]
processed_samples 18200 unjoint_samples 18200 joint_samples 54 [314993, 1030856]
processed_samples 18200 unjoint_samples 18200 joint_samples 55 [335084, 1044373]
processed_samples 18201 unjoint_samples 18200 joint_samples 56 [342172, 1047838]
processed_samples 18200 unjoint_samples 18200 joint_samples 54 [1036533, 779078]
processed_samples 18200 unjoint_samples 18200 joint_samples 54 [842713, 1020328]
processed_samples 18202 unjoint_samples 18200 joint_samples 54 [548944, 1046745]
processed_samples 18201 unjoint_samples 18200 joint_samples 55 [1046052, 996889]
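Note: each "processed_samples ... unjoint_samples ... joint_samples N [a, b]" record reports the sampler's running counters, and every record appears twice because two ranks print the same line. A small parsing sketch is given below; the field names simply mirror the log text, and the semantics of the bracketed pair are not stated in the log, so it is kept as an opaque pair:

import re

# Parse "processed_samples ..." progress records out of log text.
RECORD_RE = re.compile(
    r"processed_samples (\d+) unjoint_samples (\d+) joint_samples (\d+) \[(\d+), (\d+)\]"
)

def parse_progress(text):
    rows = []
    for m in RECORD_RE.finditer(text):
        rows.append({
            "processed_samples": int(m.group(1)),
            "unjoint_samples": int(m.group(2)),
            "joint_samples": int(m.group(3)),
            "pair": (int(m.group(4)), int(m.group(5))),  # meaning not given in the log; kept opaque
        })
    return rows

print(parse_progress("processed_samples 18200 unjoint_samples 18200 joint_samples 54 [314993, 1030856]"))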
[h264 @ 0x55f3ab5a1540] mmco: unref short failure  (decoder warning repeated; repetitions and duplicate lines omitted)
processed_samples 18300 unjoint_samples 18300 joint_samples 55 [1042584, 55279]
processed_samples 18300 unjoint_samples 18300 joint_samples 55 [1024704, 240976]
processed_samples 18300 unjoint_samples 18300 joint_samples 55 [580055, 1044373]
processed_samples 18300 unjoint_samples 18300 joint_samples 56 [1040245, 325613]
processed_samples 18301 unjoint_samples 18300 joint_samples 56 [233796, 1044674]
processed_samples 18301 unjoint_samples 18300 joint_samples 56 [695618, 1047838]
processed_samples 18300 unjoint_samples 18300 joint_samples 54 [757102, 1030856]
processed_samples 18302 unjoint_samples 18300 joint_samples 54 [817573, 1046745]
processed_samples 18400 unjoint_samples 18400 joint_samples 55 [1024704, 599904]
processed_samples 18400 unjoint_samples 18400 joint_samples 55 [111049, 1037498]
processed_samples 18400 unjoint_samples 18400 joint_samples 55 [1042584, 324034]
processed_samples 18400 unjoint_samples 18400 joint_samples 56 [1040245, 611925]
processed_samples 18400 unjoint_samples 18400 joint_samples 55 [960242, 1044373]
processed_samples 18401 unjoint_samples 18400 joint_samples 57 [1036222, 20716]
processed_samples 18402 unjoint_samples 18400 joint_samples 55 [971976, 211939]
processed_samples 18401 unjoint_samples 18400 joint_samples 56 [570887, 1044674]
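Note: if the video clips are decoded through PyAV (an assumption about this pipeline's dataloader; another backend such as decord or OpenCV may be in use instead), the libav log level can be lowered so these decoder warnings are not forwarded to the console, roughly as sketched below:

import av  # PyAV; assumed decode backend, not confirmed by this log

# Forward only errors (and worse) from FFmpeg/libav, which silences the
# recurring "mmco: unref short failure" warnings without touching decoding itself.
av.logging.set_level(av.logging.ERROR)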
[h264 @ 0x55e9d35a9580] mmco: unref short failure  (decoder warning repeated; repetitions and duplicate lines omitted)
processed_samples 18500 unjoint_samples 18500 joint_samples 55 [1042584, 676553]
processed_samples 18500 unjoint_samples 18500 joint_samples 56 [300141, 1046633]
processed_samples 18500 unjoint_samples 18500 joint_samples 56 [1040245, 941542]
processed_samples 18500 unjoint_samples 18500 joint_samples 55 [381010, 1037498]
processed_samples 18501 unjoint_samples 18500 joint_samples 57 [1036222, 387101]
processed_samples 18501 unjoint_samples 18500 joint_samples 56 [944735, 1044674]
processed_samples 18500 unjoint_samples 18500 joint_samples 55 [1024704, 896021]
processed_samples 18502 unjoint_samples 18500 joint_samples 55 [971976, 674815]
/root/miniconda3/envs/py38/lib/python3.8/site-packages/PIL/TiffImagePlugin.py:870: UserWarning: Corrupt EXIF data. Expecting to read 2 bytes but only got 0.
  warnings.warn(str(msg))
processed_samples 18600 unjoint_samples 18600 joint_samples 55 [693120, 1037498]
processed_samples 18600 unjoint_samples 18600 joint_samples 56 [1048024, 160858]
processed_samples 18600 unjoint_samples 18600 joint_samples 56 [608691, 1046633]
processed_samples 18600 unjoint_samples 18600 joint_samples 55 [1042584, 1032326]
processed_samples 18600 unjoint_samples 18600 joint_samples 57 [196490, 1026574]
processed_samples 18601 unjoint_samples 18600 joint_samples 57 [1036222, 820592]
processed_samples 18601 unjoint_samples 18600 joint_samples 57 [1046171, 322319]
processed_samples 18602 unjoint_samples 18600 joint_samples 55 [1011502, 1010216]
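Note: the Pillow "Corrupt EXIF data" UserWarning above comes from TiffImagePlugin when an image with a truncated EXIF segment is opened; the pixel data itself still loads. If the warning is judged benign for this run (an assumption), it can be filtered with the standard warnings module, for example:

import warnings

# Suppress Pillow's warning about truncated EXIF segments; the images
# are assumed to decode correctly despite the damaged metadata.
warnings.filterwarnings(
    "ignore",
    message="Corrupt EXIF data",
    category=UserWarning,
    module="PIL.TiffImagePlugin",
)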
unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d2e14880] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3aa5e9240] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d42db480] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d2523300] mmco: unref short failure [h264 @ 0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure processed_samples 18700 unjoint_samples 18700 joint_samples 56 [393716, 1041925] processed_samples 18700 unjoint_samples 18700 joint_samples 56 [393716, 1041925] processed_samples 18700 unjoint_samples 18700 joint_samples 55 [974251, 1037498] processed_samples 18700 unjoint_samples 18700 joint_samples 55 [974251, 1037498] processed_samples 18700 unjoint_samples 18700 joint_samples 56 [1048024, 424578] processed_samples 18700 unjoint_samples 18700 joint_samples 56 [1048024, 424578] processed_samples 18701 unjoint_samples 18700 joint_samples 58 [1046417, 37475] processed_samples 18701 unjoint_samples 18700 joint_samples 58 [1046417, 37475] processed_samples 18700 unjoint_samples 18700 joint_samples 57 [553466, 1026574] processed_samples 18700 unjoint_samples 18700 joint_samples 57 [553466, 1026574] processed_samples 18702 unjoint_samples 18700 joint_samples 56 [163640, 
1044447] processed_samples 18702 unjoint_samples 18700 joint_samples 56 [163640, 1044447] processed_samples 18700 unjoint_samples 18700 joint_samples 56 [885331, 1046633] processed_samples 18700 unjoint_samples 18700 joint_samples 56 [885331, 1046633] processed_samples 18701 unjoint_samples 18700 joint_samples 57 [1046171, 663549] processed_samples 18701 unjoint_samples 18700 joint_samples 57 [1046171, 663549] [h264 @ 0x55f3aa7214c0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a6cbdb00] mmco: unref short failure [h264 @ 0x55f3a6cbdb00] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3a6cbdb00] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55e9d0744400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d34c28c0] mmco: unref short failure [h264 @ 0x55f3a7e93bc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d534aac0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55e9d3186e80] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7150980] mmco: unref short failure [h264 @ 0x55e9d11699c0] mmco: unref short failure [h264 @ 0x55e9d4663280] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d4fa9080] mmco: unref short failure [h264 @ 0x55e9d4fa9080] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref 
processed_samples 18800 unjoint_samples 18800 joint_samples 56 [228256, 1047551]
processed_samples 18800 unjoint_samples 18800 joint_samples 56 [1048024, 708348]
processed_samples 18800 unjoint_samples 18800 joint_samples 56 [860669, 1041925]
processed_samples 18800 unjoint_samples 18800 joint_samples 57 [1046422, 178078]
processed_samples 18801 unjoint_samples 18800 joint_samples 58 [1046417, 277078]
processed_samples 18800 unjoint_samples 18800 joint_samples 57 [934951, 1026574]
processed_samples 18802 unjoint_samples 18800 joint_samples 56 [459383, 1044447]
processed_samples 18801 unjoint_samples 18800 joint_samples 57 [1046171, 970514]
[h264 @ 0x...] mmco: unref short failure   (repeated)
processed_samples 18900 unjoint_samples 18900 joint_samples 57 [1047342, 44903]
processed_samples 18900 unjoint_samples 18900 joint_samples 56 [483253, 1047551]
processed_samples 18900 unjoint_samples 18900 joint_samples 57 [1046422, 435202]
processed_samples 18900 unjoint_samples 18900 joint_samples 58 [1046521, 105568]
processed_samples 18901 unjoint_samples 18900 joint_samples 58 [323854, 1002276]
processed_samples 18901 unjoint_samples 18900 joint_samples 58 [1046417, 528077]
processed_samples 18900 unjoint_samples 18900 joint_samples 56 [1048024, 1032992]
processed_samples 18902 unjoint_samples 18900 joint_samples 56 [822513, 1044447]
[h264 @ 0x...] mmco: unref short failure   (repeated)
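The processed_samples / unjoint_samples / joint_samples lines are the data pipeline's progress counters; each record appears twice in the raw log (apparently once per duplicated logger), and the meaning of the bracketed pair is not spelled out here. A minimal sketch for pulling de-duplicated records out of a node log like this one, assuming only the line format visible above; the regex and function name are illustrative, not part of the training code.

import re
from collections import OrderedDict

# Matches records such as:
#   processed_samples 19000 unjoint_samples 19000 joint_samples 57 [1047342, 297915]
PROGRESS_RE = re.compile(
    r"processed_samples (\d+) unjoint_samples (\d+) joint_samples (\d+) \[(\d+), (\d+)\]"
)

def parse_progress(log_path):
    # Collect unique records in order of first appearance; exact duplicates collapse.
    records = OrderedDict()
    with open(log_path, errors="replace") as f:
        for line in f:
            for match in PROGRESS_RE.finditer(line):
                records.setdefault(match.groups(), None)
    return [
        {
            "processed_samples": int(p),
            "unjoint_samples": int(u),
            "joint_samples": int(j),
            "pair": (int(a), int(b)),  # meaning of this pair is not documented in the log
        }
        for (p, u, j, a, b) in records
    ]

Running parse_progress() over the node log yields one record per counter update, which is enough to see how processed_samples advances over time.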
processed_samples 19000 unjoint_samples 19000 joint_samples 57 [1047342, 297915]
processed_samples 19000 unjoint_samples 19000 joint_samples 57 [298798, 1044033]
processed_samples 19000 unjoint_samples 19000 joint_samples 58 [1046521, 351095]
processed_samples 19000 unjoint_samples 19000 joint_samples 57 [1046422, 740864]
processed_samples 19000 unjoint_samples 19000 joint_samples 56 [890183, 1047551]
processed_samples 19001 unjoint_samples 19000 joint_samples 58 [653387, 1002276]
processed_samples 19002 unjoint_samples 19000 joint_samples 57 [1045663, 202222]
processed_samples 19001 unjoint_samples 19000 joint_samples 58 [1046417, 929762]
[h264 @ 0x...] mmco: unref short failure   (repeated)
processed_samples 19100 unjoint_samples 19100 joint_samples 57 [1047132, 245604]
processed_samples 19100 unjoint_samples 19100 joint_samples 57 [1047342, 580889]
processed_samples 19100 unjoint_samples 19100 joint_samples 57 [579030, 1044033]
processed_samples 19101 unjoint_samples 19100 joint_samples 59 [186024, 1048242]
processed_samples 19100 unjoint_samples 19100 joint_samples 58 [1046422, 24068]
processed_samples 19100 unjoint_samples 19100 joint_samples 58 [1046521, 630521]
processed_samples 19102 unjoint_samples 19100 joint_samples 57 [1045663, 485954]
processed_samples 19101 unjoint_samples 19100 joint_samples 58 [998023, 1002276]
[h264 @ 0x...] mmco: unref short failure   (repeated)
processed_samples 19200 unjoint_samples 19200 joint_samples 57 [870000, 1044033]
processed_samples 19200 unjoint_samples 19200 joint_samples 57 [1047132, 528328]
processed_samples 19201 unjoint_samples 19200 joint_samples 59 [179322, 1046503]
processed_samples 19200 unjoint_samples 19200 joint_samples 57 [1047342, 929549]
processed_samples 19200 unjoint_samples 19200 joint_samples 58 [1046521, 966011]
processed_samples 19200 unjoint_samples 19200 joint_samples 58 [1046422, 486933]
processed_samples 19201 unjoint_samples 19200 joint_samples 59 [507061, 1048242]
processed_samples 19202 unjoint_samples 19200 joint_samples 57 [1045663, 774680]
[h264 @ 0x...] mmco: unref short failure   (repeated)
processed_samples 19300 unjoint_samples 19300 joint_samples 58 [223340, 1035781]
processed_samples 19300 unjoint_samples 19300 joint_samples 57 [1047132, 780702]
processed_samples 19300 unjoint_samples 19300 joint_samples 59 [1046521, 231329]
processed_samples 19300 unjoint_samples 19300 joint_samples 58 [1030425, 198286]
processed_samples 19300 unjoint_samples 19300 joint_samples 58 [1046422, 763897]
processed_samples 19301 unjoint_samples 19300 joint_samples 59 [837060, 1048242]
processed_samples 19302 unjoint_samples 19300 joint_samples 58 [164426, 1031036]
processed_samples 19301 unjoint_samples 19300 joint_samples 59 [567519, 1046503]
[h264 @ 0x...] mmco: unref short failure   (repeated)
processed_samples 19400 unjoint_samples 19400 joint_samples 58 [89569, 1045029]
processed_samples 19400 unjoint_samples 19400 joint_samples 58 [1030425, 457816]
processed_samples 19400 unjoint_samples 19400 joint_samples 58 [546671, 1035781]
processed_samples 19400 unjoint_samples 19400 joint_samples 59 [1046422, 31186]
processed_samples 19401 unjoint_samples 19400 joint_samples 60 [1047710, 173569]
processed_samples 19400 unjoint_samples 19400 joint_samples 59 [1046521, 563624]
processed_samples 19401 unjoint_samples 19400 joint_samples 59 [826325, 1046503]
processed_samples 19402 unjoint_samples 19400 joint_samples 58 [442827, 1031036]
[h264 @ 0x...] mmco: unref short failure   (repeated)
processed_samples 19500 unjoint_samples 19500 joint_samples 58 [1030425, 760425]
processed_samples 19500 unjoint_samples 19500 joint_samples 59 [1046422, 417431]
processed_samples 19500 unjoint_samples 19500 joint_samples 59 [1046521, 933313]
processed_samples 19500 unjoint_samples 19500 joint_samples 58 [335324, 1045029]
processed_samples 19501 unjoint_samples 19500 joint_samples 60 [1030814, 280703]
processed_samples 19501 unjoint_samples 19500 joint_samples 60 [1047710, 516715]
processed_samples 19500 unjoint_samples 19500 joint_samples 58 [920390, 1035781]
processed_samples 19502 unjoint_samples 19500 joint_samples 58 [766103, 1031036]
[h264 @ 0x...] mmco: unref short failure   (repeated)
processed_samples 19600 unjoint_samples 19600 joint_samples 58 [720196, 1045029]
processed_samples 19600 unjoint_samples 19600 joint_samples 59 [68625, 1044729]
processed_samples 19600 unjoint_samples 19600 joint_samples 59 [1048152, 254824]
processed_samples 19600 unjoint_samples 19600 joint_samples 60 [1046521, 247480]
processed_samples 19601 unjoint_samples 19600 joint_samples 60 [1030814, 534218]
processed_samples 19601 unjoint_samples 19600 joint_samples 60 [1047710, 784490]
processed_samples 19600 unjoint_samples 19600 joint_samples 59 [1046422, 774090]
processed_samples 19602 unjoint_samples 19600 joint_samples 58 [1043562, 1042569]
[h264 @ 0x...] mmco: unref short failure   (repeated)
processed_samples 19700 unjoint_samples 19700 joint_samples 59 [1047030, 33986]
processed_samples 19700 unjoint_samples 19700 joint_samples 60 [77976, 1047038]
processed_samples 19700 unjoint_samples 19700 joint_samples 59 [1048152, 494007]
processed_samples 19701 unjoint_samples 19700 joint_samples 61 [145720, 1023627]
processed_samples 19700 unjoint_samples 19700 joint_samples 60 [1046521, 507727]
processed_samples 19700 unjoint_samples 19700 joint_samples 59 [469325, 1044729]
processed_samples 19701 unjoint_samples 19700 joint_samples 60 [1030814, 1031685]
processed_samples 19702 unjoint_samples 19700 joint_samples 59 [328456, 1048099]
[h264 @ 0x...] mmco: unref short failure   (repeated)
processed_samples 19800 unjoint_samples 19800 joint_samples 59 [1047030, 387512]
processed_samples 19800 unjoint_samples 19800 joint_samples 59 [1048152, 875204]
processed_samples 19800 unjoint_samples 19800 joint_samples 59 [871112, 1044729]
processed_samples 19800 unjoint_samples 19800 joint_samples 60 [402568, 1047038]
processed_samples 19801 unjoint_samples 19800 joint_samples 61 [508112, 1023627]
processed_samples 19801 unjoint_samples 19800 joint_samples 61 [282715, 1047154]
processed_samples 19800 unjoint_samples 19800 joint_samples 60 [1046521, 828185]
processed_samples 19802 unjoint_samples 19800 joint_samples 59 [655358, 1048099]
[h264 @ 0x...] mmco: unref short failure   (repeated)
processed_samples 19900 unjoint_samples 19900 joint_samples 60 [1048152, 108103]
processed_samples 19900 unjoint_samples 19900 joint_samples 60 [1046673, 102996]
processed_samples 19900 unjoint_samples 19900 joint_samples 59 [1047030, 644320]
processed_samples 19900 unjoint_samples 19900 joint_samples 60 [843266, 1047038]
processed_samples 19900 unjoint_samples 19900 joint_samples 61 [61254, 1048236]
processed_samples 19901 unjoint_samples 19900 joint_samples 61 [635758, 1047154]
processed_samples 19901 unjoint_samples 19900 joint_samples 61 [725053, 1023627]
processed_samples 19902 unjoint_samples 19900 joint_samples 60 [1046651, 23114]
[h264 @ 0x...] mmco: unref short failure   (repeated)
failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55e9d473cd80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55f3aa5fff80] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d31c4cc0] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55e9d11e4680] mmco: unref short failure [h264 @ 0x55f3a8bed980] mmco: unref short failure [h264 @ 0x55f3a8bed980] mmco: unref short failure [h264 @ 0x55e9d2523300] mmco: unref short failure [h264 @ 0x55e9d2523300] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d2523300] mmco: unref short failure [h264 @ 0x55e9d2523300] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55f3aa840600] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d1e47d40] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a79d6300] mmco: unref short failure [h264 @ 0x55e9d12f0000] mmco: unref short failure processed_samples 20000 unjoint_samples 20000 joint_samples 61 [1033658, 196475] processed_samples 20000 unjoint_samples 20000 joint_samples 61 [1033658, 196475] processed_samples 20000 unjoint_samples 20000 joint_samples 60 [1046673, 397588] processed_samples 20000 unjoint_samples 20000 joint_samples 60 [1046673, 397588] processed_samples 20000 unjoint_samples 20000 joint_samples 59 [1047030, 923819] processed_samples 20000 unjoint_samples 20000 joint_samples 59 [1047030, 923819] processed_samples 20000 unjoint_samples 20000 joint_samples 60 [1048152, 380115] processed_samples 20000 unjoint_samples 20000 joint_samples 60 [1048152, 380115] processed_samples 20001 unjoint_samples 20000 joint_samples 61 [1037583, 1039784] processed_samples 20001 unjoint_samples 20000 joint_samples 61 [1037583, 1039784] processed_samples 20002 unjoint_samples 20000 joint_samples 60 [1046651, 308279] processed_samples 20002 unjoint_samples 20000 joint_samples 60 [1046651, 308279] processed_samples 20000 unjoint_samples 20000 joint_samples 61 [354851, 1048236] processed_samples 20000 unjoint_samples 20000 joint_samples 61 [354851, 1048236] [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55e9d64f2540] mmco: unref short failure [h264 @ 
0x55f3aa9f1e00] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure processed_samples 20001 unjoint_samples 20000 joint_samples 61 [912020, 1047154] processed_samples 20001 unjoint_samples 20000 joint_samples 61 [912020, 1047154] [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55e9d4f0f640] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55f3ab5a1540] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55f3a813bcc0] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55e9d1141c80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55f3a7e8ddc0] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55e9d0797740] mmco: unref short failure [h264 @ 0x55f3a6e7b4c0] mmco: unref short failure [h264 @ 0x55e9d34cfd80] mmco: unref short failure [h264 @ 0x55f3a85816c0] mmco: unref short failure [h264 @ 0x55e9d121c340] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55f3a8072400] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55e9d0725780] mmco: unref short failure [h264 @ 0x55f3a77a0740] mmco: unref short failure [h264 @ 0x55e9d3d9b740] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55f3a775e700] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d0b14e40] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55e9d4f11bc0] mmco: unref short failure [h264 @ 0x55f3aa768400] mmco: unref short failure [h264 @ 0x55f3a8751d80] mmco: unref short failure [h264 @ 0x55e9d0877300] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55f3a8a36b80] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55e9d7c0e580] mmco: unref short failure [h264 @ 0x55f3a6e80800] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure processed_samples 20100 unjoint_samples 20100 joint_samples 60 [1048152, 677971] processed_samples 20100 unjoint_samples 20100 joint_samples 60 [1047030, 189045] processed_samples 20100 unjoint_samples 20100 joint_samples 60 [1048152, 677971] processed_samples 20100 unjoint_samples 20100 joint_samples 60 [1047030, 189045] processed_samples 20100 unjoint_samples 20100 joint_samples 61 [1033658, 547814] processed_samples 20100 unjoint_samples 20100 joint_samples 61 [1033658, 547814] 
processed_samples 20100 unjoint_samples 20100 joint_samples 61 [581308, 1048236] processed_samples 20100 unjoint_samples 20100 joint_samples 61 [581308, 1048236] processed_samples 20101 unjoint_samples 20100 joint_samples 62 [83548, 1047574] processed_samples 20101 unjoint_samples 20100 joint_samples 62 [83548, 1047574] processed_samples 20101 unjoint_samples 20100 joint_samples 62 [318987, 1046492] processed_samples 20101 unjoint_samples 20100 joint_samples 62 [318987, 1046492] processed_samples 20100 unjoint_samples 20100 joint_samples 60 [1046673, 756436] processed_samples 20100 unjoint_samples 20100 joint_samples 60 [1046673, 756436] [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55f3abc13a40] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure [h264 @ 0x55e9d34e7ac0] mmco: unref short failure processed_samples 20102 unjoint_samples 20100 joint_samples 60 [1046651, 556626] processed_samples 20102 unjoint_samples 20100 joint_samples 60 [1046651, 556626] [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55f3aaf5efc0] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d352ed40] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55f3aac3b340] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3aa3c2b00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55f3ad7adc00] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55e9d47735c0] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55e9d593f7c0] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55f3a8f3ab00] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55e9d5615e80] mmco: unref short failure [h264 @ 0x55f3a67ce700] mmco: unref short failure [h264 @ 0x55f3a67ce700] mmco: unref short failure [h264 @ 0x55e9d568b380] mmco: unref short failure [h264 @ 0x55e9d568b380] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55e9d82326c0] mmco: unref short failure [h264 @ 0x55f3a7574280] mmco: unref short failure [h264 @ 0x55e9d32c2340] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55e9d398e240] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3a7591080] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55f3aa86de40] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55e9d1240980] mmco: unref short failure [h264 @ 0x55f3a667fe80] mmco: unref short failure [h264 @ 0x55e9d3bc3e80] mmco: unref short failure [h264 @ 0x55e9d5969500] mmco: unref short failure [h264 @ 0x55f3ac89de80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55f3aa7ced80] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55e9d12de740] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55f3a79d1d80] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55e9d35a9580] mmco: unref short failure [h264 @ 0x55f3a83fbec0] mmco: unref short 
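The "[h264 @ 0x...] mmco: unref short failure" messages interleaved with the counters above come from the H.264 video decoder while samples are being prepared; they drown out the progress lines in the raw node log. The following minimal sketch (not part of the training code; the log path is a hypothetical example) strips them out so the counters are easier to scan:

# Minimal sketch: remove the repeated "[h264 @ 0x...] mmco: unref short failure"
# decoder warnings from a node log so the processed_samples lines stand out.
import re
import sys

NOISE = re.compile(r"\[h264 @ 0x[0-9a-f]+\]\s+mmco: unref short failure")

def clean_log(path):
    with open(path, "r", errors="replace") as fh:
        for line in fh:
            # Several messages can share one physical line, so substitute rather than drop.
            cleaned = NOISE.sub("", line).strip()
            if cleaned:
                print(cleaned)

if __name__ == "__main__":
    # Usage example (file name hypothetical): python clean_log.py path/to/log_node.txt
    clean_log(sys.argv[1])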
[h264 @ 0x...] mmco: unref short failure   (h264 decoder warning, repeated many times throughout this interval; duplicate messages omitted)
processed_samples 20200 unjoint_samples 20200 joint_samples 61 [120071, 1037401]
processed_samples 20200 unjoint_samples 20200 joint_samples 60 [1047030, 515462]
processed_samples 20201 unjoint_samples 20200 joint_samples 62 [556600, 1046492]
processed_samples 20200 unjoint_samples 20200 joint_samples 61 [1033658, 872536]
processed_samples 20201 unjoint_samples 20200 joint_samples 62 [322720, 1047574]
processed_samples 20200 unjoint_samples 20200 joint_samples 60 [1048152, 1009992]
processed_samples 20200 unjoint_samples 20200 joint_samples 61 [947896, 1048236]
processed_samples 20202 unjoint_samples 20200 joint_samples 60 [1046651, 848258]

processed_samples 20300 unjoint_samples 20300 joint_samples 61 [262482, 1046224]
processed_samples 20300 unjoint_samples 20300 joint_samples 61 [366070, 1037401]
processed_samples 20300 unjoint_samples 20300 joint_samples 62 [1047712, 191352]
processed_samples 20300 unjoint_samples 20300 joint_samples 62 [1043723, 129548]
processed_samples 20300 unjoint_samples 20300 joint_samples 60 [1047030, 816473]
processed_samples 20301 unjoint_samples 20300 joint_samples 62 [710476, 1047574]
processed_samples 20301 unjoint_samples 20300 joint_samples 62 [889606, 1046492]
processed_samples 20302 unjoint_samples 20300 joint_samples 61 [305720, 1041557]

processed_samples 20400 unjoint_samples 20400 joint_samples 61 [733685, 1037401]
processed_samples 20400 unjoint_samples 20400 joint_samples 61 [608699, 1046224]
processed_samples 20400 unjoint_samples 20400 joint_samples 62 [1043723, 472234]
processed_samples 20400 unjoint_samples 20400 joint_samples 62 [1047712, 526416]
processed_samples 20400 unjoint_samples 20400 joint_samples 61 [108388, 1028031]
processed_samples 20401 unjoint_samples 20400 joint_samples 63 [1026759, 188755]
processed_samples 20402 unjoint_samples 20400 joint_samples 61 [738642, 1041557]
processed_samples 20401 unjoint_samples 20400 joint_samples 62 [921008, 1047574]
[h264 @ 0x...] mmco: unref short failure   (h264 decoder warning, repeated many times throughout this interval; duplicate messages omitted)
processed_samples 20500 unjoint_samples 20500 joint_samples 61 [378061, 1028031]
processed_samples 20501 unjoint_samples 20500 joint_samples 63 [1047398, 176616]
processed_samples 20501 unjoint_samples 20500 joint_samples 63 [1026759, 530036]
processed_samples 20500 unjoint_samples 20500 joint_samples 61 [1008369, 1037401]
processed_samples 20500 unjoint_samples 20500 joint_samples 62 [1043723, 810941]
processed_samples 20500 unjoint_samples 20500 joint_samples 61 [1005553, 1046224]
processed_samples 20502 unjoint_samples 20500 joint_samples 62 [1046151, 30219]
processed_samples 20500 unjoint_samples 20500 joint_samples 62 [1047712, 863042]

processed_samples 20600 unjoint_samples 20600 joint_samples 62 [1044965, 240354]
processed_samples 20600 unjoint_samples 20600 joint_samples 62 [252577, 1044173]
processed_samples 20600 unjoint_samples 20600 joint_samples 63 [149972, 1039879]
processed_samples 20600 unjoint_samples 20600 joint_samples 63 [1047712, 109995]
processed_samples 20601 unjoint_samples 20600 joint_samples 63 [1047398, 433346]
processed_samples 20600 unjoint_samples 20600 joint_samples 61 [639370, 1028031]
processed_samples 20602 unjoint_samples 20600 joint_samples 62 [1046151, 288191]
processed_samples 20601 unjoint_samples 20600 joint_samples 63 [1026759, 872066]

[2024-12-01 22:43:55,194] torch.distributed.elastic.agent.server.api: [ERROR] Error waiting on exit barrier. Elapsed: 300.1064147949219 seconds
+ set +x
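The run closes with the elastic agent reporting "Error waiting on exit barrier" after roughly 300 s, which is consistent with the agent's default 300-second exit-barrier timeout, so at least one rank did not shut down within that window. The processed_samples counters above are therefore the last record of how far the dataloader got on this node; the sketch below (not part of the training scripts; the file name is a hypothetical example) extracts and summarizes them, reporting the bracketed pair as-is without interpreting it:

# Minimal sketch: parse "processed_samples ... joint_samples N [a, b]" lines from a
# node log and report the last state reached. The regex mirrors the line format above.
import re
import sys

PROGRESS = re.compile(
    r"processed_samples (\d+) unjoint_samples (\d+) "
    r"joint_samples (\d+) \[(\d+), (\d+)\]"
)

def summarize(path):
    with open(path, "r", errors="replace") as fh:
        records = [tuple(map(int, m.groups())) for m in PROGRESS.finditer(fh.read())]
    if not records:
        print("no progress lines found")
        return
    last = max(records)  # tuples compare on processed_samples first
    print(f"{len(records)} progress records parsed")
    print(f"last: processed_samples={last[0]} unjoint_samples={last[1]} "
          f"joint_samples={last[2]} [{last[3]}, {last[4]}]")

if __name__ == "__main__":
    # Usage example (file name hypothetical): python parse_progress.py path/to/log_node.txt
    summarize(sys.argv[1])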