|
[20:07:45] - INFO - absl - A polynomial schedule was set with a non-positive `transition_steps` value; this results in a constant schedule with value `init_value`.
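This optax message typically appears when the learning-rate schedule is built with `transition_steps <= 0` (for example, when the computed number of training steps comes out as zero). A minimal sketch of the pattern, assuming optax is the schedule library in use; the step counts and learning rate below are illustrative, not values taken from the script:

    import optax

    num_train_steps = 3 * 1250   # epochs * steps per epoch (numbers borrowed from the log for illustration)
    warmup_steps = 0             # assumption: no warmup in this sketch

    # If `transition_steps` were <= 0, optax would log the message above and
    # silently return a constant schedule equal to `init_value`.
    decay_schedule = optax.polynomial_schedule(
        init_value=5e-5,         # assumed initial learning rate
        end_value=0.0,
        power=1.0,               # linear decay
        transition_steps=max(num_train_steps - warmup_steps, 1),
    )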
|
/home/dat/pino/lib/python3.8/site-packages/jax/_src/numpy/lax_numpy.py:3132: UserWarning: Explicitly requested dtype <class 'jax._src.numpy.lax_numpy.int64'> requested in zeros is not available, and will be truncated to dtype int32. To enable more dtypes, set the jax_enable_x64 configuration option or the JAX_ENABLE_X64 shell environment variable. See https://github.com/google/jax#current-gotchas for more.
|
lax._check_user_dtype_supported(dtype, "zeros") |
|
/home/dat/pino/lib/python3.8/site-packages/jax/lib/xla_bridge.py:386: UserWarning: jax.host_count has been renamed to jax.process_count. This alias will eventually be removed; please update your code. |
|
warnings.warn( |
|
/home/dat/pino/lib/python3.8/site-packages/jax/lib/xla_bridge.py:373: UserWarning: jax.host_id has been renamed to jax.process_index. This alias will eventually be removed; please update your code.
|
warnings.warn( |
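Both deprecation warnings point at the same rename in the JAX multi-process API. A minimal sketch of the substitution; the calls shown are the public JAX functions the warnings refer to, not code taken from run_mlm_flax.py:

    import jax

    # Old aliases (still work, but emit the UserWarnings above):
    #   jax.host_count()  ->  jax.process_count()
    #   jax.host_id()     ->  jax.process_index()
    n_processes = jax.process_count()    # number of participating processes/hosts
    this_process = jax.process_index()   # index of the current process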
|
Epoch ... (1/3):   0%|          | 0/3 [00:00<?, ?it/s]
[20:07:46] - INFO - __main__ - Skipping to epoch 0 step 0
|
Training...: 20%|████████████████████████                                                                                                | 250/1250 [04:02<12:01, 1.39it/s]
|
Training...: 40%|████████████████████████████████████████████████                                                                        | 500/1250 [07:03<09:01, 1.39it/s]
|
Training...: 60%|████████████████████████████████████████████████████████████████████████                                                | 750/1250 [10:04<06:00, 1.39it/s]
|
Training...: 80%|████████████████████████████████████████████████████████████████████████████████████████████████                        | 1000/1250 [13:05<03:00, 1.39it/s]
|
Epoch ... (1/3): 33%|████████████████████████████████████████                                                                                | 1/3 [16:10<32:21, 970.78s/it]
|
Epoch ... (1/3): 33%|████████████████████████████████████████                                                                                | 1/3 [16:36<32:21, 970.78s/it]
|
Training...: 20%|████████████████████████                                                                                                | 250/1250 [02:37<12:01, 1.39it/s]
|
Training...: 40%|████████████████████████████████████████████████                                                                        | 500/1250 [05:38<09:01, 1.38it/s]
|
Epoch ... (1/3): 33%|████████████████████████████████████████                                                                               | 1/3 [22:40<45:20, 1360.00s/it]
|
Traceback (most recent call last): |
|
File "./run_mlm_flax.py", line 785, in <module> |
|
samples = [tokenized_datasets["train"][int(idx)] for idx in batch_idx] |
|
File "./run_mlm_flax.py", line 785, in <listcomp> |
|
samples = [tokenized_datasets["train"][int(idx)] for idx in batch_idx] |
|
File "/home/dat/datasets/src/datasets/arrow_dataset.py", line 1517, in __getitem__ |
|
return self._getitem( |
|
File "/home/dat/datasets/src/datasets/arrow_dataset.py", line 1510, in _getitem |
|
formatted_output = format_table( |
|
File "/home/dat/datasets/src/datasets/formatting/formatting.py", line 414, in format_table |
|
return formatter(pa_table, query_type=query_type) |
|
File "/home/dat/datasets/src/datasets/formatting/formatting.py", line 194, in __call__ |
|
return self.format_row(pa_table) |
|
File "/home/dat/datasets/src/datasets/formatting/formatting.py", line 223, in format_row |
|
return self.python_arrow_extractor().extract_row(pa_table) |
|
File "/home/dat/datasets/src/datasets/formatting/formatting.py", line 134, in extract_row |
|
return _unnest(pa_table.to_pydict()) |
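The failing frame is the per-example indexing of the tokenized dataset at line 785 of run_mlm_flax.py: each element of batch_idx triggers a separate __getitem__ call and row-formatting pass in the datasets library. A minimal sketch of that access pattern next to a batched alternative, which the datasets library supports by indexing with a list of integers; the tiny in-memory dataset and its column are illustrative stand-ins, while samples and batch_idx mirror the names in the traceback:

    import numpy as np
    from datasets import Dataset

    # Illustrative stand-in for tokenized_datasets["train"].
    train_dataset = Dataset.from_dict({"input_ids": [[1, 2, 3]] * 10_000})

    batch_idx = np.random.permutation(len(train_dataset))[:8]

    # Pattern from the traceback: one __getitem__ call per example,
    # each returning a dict for a single row.
    samples = [train_dataset[int(idx)] for idx in batch_idx]

    # Batched access: a single call with a list of indices returns a dict
    # of columns, avoiding per-row formatting inside the library.
    batch = train_dataset[batch_idx.tolist()]   # {"input_ids": [[...], ...]}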
|
|