update with +2 epochs of fine-tuning
Files changed:
- config.json +1 -1
- pytorch_model.bin +2 -2
- tokenizer_config.json +1 -1
- training_args.bin +1 -1
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "pszemraj/long-t5-tglobal-base-16384-booksum-
+  "_name_or_path": "pszemraj/long-t5-tglobal-base-16384-booksum-V10",
   "architectures": [
     "LongT5ForConditionalGeneration"
   ],
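A minimal usage sketch, not part of this commit: the config keeps the LongT5ForConditionalGeneration architecture and now records the V10 booksum checkpoint in _name_or_path. The repo id below is that _name_or_path value, used as a stand-in; the id of the repository this commit actually belongs to may differ.

# Hedged sketch: load the checkpoint with transformers and generate one summary.
# The repo id is the _name_or_path value from the config diff above (assumption).
from transformers import AutoTokenizer, LongT5ForConditionalGeneration

repo_id = "pszemraj/long-t5-tglobal-base-16384-booksum-V10"  # stand-in, see note above
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = LongT5ForConditionalGeneration.from_pretrained(repo_id)

text = "very long document to summarize ..."
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=16384)
summary_ids = model.generate(**inputs, max_new_tokens=256)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))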
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:134d0d187b6cde13e94f8ae3dca1aecdc26ecafe075410af4880e5ec9cf0c23d
+size 990446387
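This entry is a Git LFS pointer: the new oid and size identify the re-trained weights rather than storing them in the diff. A hedged sketch, assuming pytorch_model.bin has been pulled locally (e.g. via `git lfs pull`), of verifying a download against this pointer:

# Verify a local pytorch_model.bin against the LFS pointer in this commit.
import hashlib, os

expected_sha256 = "134d0d187b6cde13e94f8ae3dca1aecdc26ecafe075410af4880e5ec9cf0c23d"
expected_size = 990446387  # bytes (~990 MB), from the pointer above
path = "pytorch_model.bin"  # assumed local path after `git lfs pull`

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_sha256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")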
tokenizer_config.json
CHANGED
@@ -103,7 +103,7 @@
   ],
   "eos_token": "</s>",
   "extra_ids": 100,
-  "name_or_path": "pszemraj/long-t5-tglobal-base-16384-booksum-
+  "name_or_path": "pszemraj/long-t5-tglobal-base-16384-booksum-V10",
   "pad_token": "<pad>",
   "special_tokens_map_file": null,
   "tokenizer_class": "T5Tokenizer",
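Only name_or_path changed here; the special-token fields (eos_token "</s>", pad_token "<pad>", extra_ids 100) and the T5Tokenizer class are untouched. A short hedged sketch of confirming those fields on a loaded tokenizer, with the repo id again taken from the diff purely as an illustration:

# Hedged check that a loaded tokenizer matches the fields shown in this diff.
from transformers import T5Tokenizer

tok = T5Tokenizer.from_pretrained("pszemraj/long-t5-tglobal-base-16384-booksum-V10")
print(tok.eos_token, tok.pad_token)               # expected: </s> <pad>
print(tok.convert_tokens_to_ids("<extra_id_0>"))  # sentinel token added by extra_ids=100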
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7590081405196835a42cca00f60db01b8ca901bae8a197f0becb2fcfa0999a94
 size 4527
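training_args.bin is the pickled TrainingArguments object that the transformers Trainer writes alongside a checkpoint; its hash changed with the extra fine-tuning epochs while the size stayed at 4527 bytes. A hedged sketch for inspecting it (transformers must be importable when unpickling, and weights_only=False is needed on recent PyTorch):

# Hedged sketch: inspect the pickled TrainingArguments saved by the Trainer.
import torch

args = torch.load("training_args.bin", weights_only=False)  # weights_only needs torch >= 1.13
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)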