Saving weights and logs of epoch 60

Changed files:
- .run_translation_t5_flax.py.swp  +0 -0
- config.json  +1 -1
- events.out.tfevents.1625772729.t1v-n-55481057-w-0.61291.3.v2  +0 -0
- events.out.tfevents.1625773419.t1v-n-55481057-w-0.63334.3.v2  +0 -0
- events.out.tfevents.1625773718.t1v-n-55481057-w-0.64843.3.v2  +0 -0
- events.out.tfevents.1625774173.t1v-n-55481057-w-0.68444.3.v2  +0 -0
- events.out.tfevents.1625774311.t1v-n-55481057-w-0.70900.3.v2  +0 -0
- flax_model.msgpack  +2 -2
- run_translation_t5_flax.py  +1 -1

.run_translation_t5_flax.py.swp
DELETED
Binary file (57.3 kB)

config.json
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:80be336bcba50f8e015afd036ba980a21329e4a6e812c733cf4af5e66742ec69
 size 1360

events.out.tfevents.1625772729.t1v-n-55481057-w-0.61291.3.v2
ADDED
Binary file (494 kB)

events.out.tfevents.1625773419.t1v-n-55481057-w-0.63334.3.v2
ADDED
Binary file (118 kB)

events.out.tfevents.1625773718.t1v-n-55481057-w-0.64843.3.v2
ADDED
Binary file (118 kB)

events.out.tfevents.1625774173.t1v-n-55481057-w-0.68444.3.v2
ADDED
Binary file (40 Bytes)

events.out.tfevents.1625774311.t1v-n-55481057-w-0.70900.3.v2
ADDED
Binary file (1.74 MB)

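The tfevents files above are TensorBoard logs written during training. A minimal sketch for inspecting one of them locally, assuming the tensorboard package is installed (the file name below is simply one of the files listed above):

# Read scalar summaries (e.g. train/eval loss) from a downloaded tfevents file.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

event_file = "events.out.tfevents.1625774311.t1v-n-55481057-w-0.70900.3.v2"
acc = EventAccumulator(event_file)
acc.Reload()  # parse the event file

# Print every logged scalar: tag name, global step, and value.
for tag in acc.Tags().get("scalars", []):
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)
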
flax_model.msgpack
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:62a1930f893476dcbae8d8bc495c6a5e0b9d0ec5a7b0b517f81e32fb2acc64d2
+size 242032202

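Both config.json and flax_model.msgpack are tracked with Git LFS, so the diffs above only update the pointer files (new oid and size) rather than the binary contents themselves. A minimal sketch of loading the updated Flax checkpoint with transformers, assuming a T5-style seq2seq model (as the script name suggests) and that the repository also hosts a tokenizer; "user/repo" is a placeholder for the actual Hub repository id:

# Load config.json and the Flax weights (flax_model.msgpack) from the Hub or a local clone.
from transformers import AutoTokenizer, FlaxT5ForConditionalGeneration

repo_id = "user/repo"  # placeholder, not the real repository id
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = FlaxT5ForConditionalGeneration.from_pretrained(repo_id)

# Quick smoke test: generate a translation for a short input with default decoding.
inputs = tokenizer("Hello, how are you?", return_tensors="np")
outputs = model.generate(**inputs, max_length=32)
print(tokenizer.decode(outputs.sequences[0], skip_special_tokens=True))
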
run_translation_t5_flax.py
CHANGED
@@ -467,7 +467,7 @@ def main():
             load_from_cache_file=not data_args.overwrite_cache,
             desc="Running tokenizer on train dataset",
         )
-
+
     if training_args.do_eval:
         max_target_length = data_args.val_max_target_length
         if "validation" not in dataset:

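For context, the hunk above only changes whitespace on line 470, the blank line between the train-split tokenization call and the evaluation branch. The surrounding code follows the datasets.map tokenization pattern used by the Hugging Face Flax translation examples. Below is a minimal, self-contained sketch of that pattern with an illustrative dataset, language pair, and tokenizer rather than this repository's actual configuration; it also assumes a transformers version recent enough to support the text_target argument:

# Tokenize a translation dataset with datasets.map; caching is controlled by an overwrite flag.
from datasets import load_dataset
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("t5-small")               # illustrative checkpoint
dataset = load_dataset("opus_books", "de-en", split="train[:1%]")   # illustrative dataset
overwrite_cache = False  # mirrors data_args.overwrite_cache in the script

def preprocess_function(examples):
    # With batched=True, examples["translation"] is a list of {"de": ..., "en": ...} dicts.
    inputs = [pair["en"] for pair in examples["translation"]]
    targets = [pair["de"] for pair in examples["translation"]]
    model_inputs = tokenizer(inputs, max_length=128, truncation=True)
    labels = tokenizer(text_target=targets, max_length=128, truncation=True)
    model_inputs["labels"] = labels["input_ids"]
    return model_inputs

tokenized = dataset.map(
    preprocess_function,
    batched=True,
    remove_columns=dataset.column_names,
    load_from_cache_file=not overwrite_cache,
    desc="Running tokenizer on train dataset",
)
print(tokenized)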