bongseok committed
Commit 69e87fd · 1 Parent(s): 5a90509

Training in progress, step 2000

.gitattributes CHANGED
@@ -32,3 +32,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,60 @@
+ {
+ "_name_or_path": "gogamza/kobart-base-v2",
+ "activation_dropout": 0.0,
+ "activation_function": "gelu",
+ "add_bias_logits": false,
+ "add_final_layer_norm": false,
+ "architectures": [
+ "BartForConditionalGeneration"
+ ],
+ "attention_dropout": 0.0,
+ "author": "Heewon Jeon(madjakarta@gmail.com)",
+ "bos_token_id": 1,
+ "classif_dropout": 0.1,
+ "classifier_dropout": 0.1,
+ "d_model": 768,
+ "decoder_attention_heads": 16,
+ "decoder_ffn_dim": 3072,
+ "decoder_layerdrop": 0.0,
+ "decoder_layers": 6,
+ "decoder_start_token_id": 1,
+ "do_blenderbot_90_layernorm": false,
+ "dropout": 0.1,
+ "early_stopping": true,
+ "encoder_attention_heads": 16,
+ "encoder_ffn_dim": 3072,
+ "encoder_layerdrop": 0.0,
+ "encoder_layers": 6,
+ "eos_token_id": 1,
+ "extra_pos_embeddings": 2,
+ "force_bos_token_to_be_generated": false,
+ "forced_eos_token_id": 1,
+ "gradient_checkpointing": false,
+ "id2label": {
+ "0": "LABEL_0"
+ },
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "kobart_version": 2.0,
+ "label2id": {
+ "LABEL_0": 0
+ },
+ "length_penalty": 5.0,
+ "max_length": 128,
+ "max_position_embeddings": 1026,
+ "min_length": 30,
+ "model_type": "bart",
+ "no_repeat_ngram_size": 2,
+ "normalize_before": false,
+ "normalize_embedding": true,
+ "num_beams": 10,
+ "num_hidden_layers": 6,
+ "pad_token_id": 3,
+ "scale_embedding": false,
+ "static_position_embeddings": false,
+ "tokenizer_class": "PreTrainedTokenizerFast",
+ "torch_dtype": "float32",
+ "transformers_version": "4.25.1",
+ "use_cache": true,
+ "vocab_size": 30000
+ }
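The config.json above pins the architecture to BartForConditionalGeneration and bakes generation defaults (num_beams=10, length_penalty=5.0, min_length=30, max_length=128, no_repeat_ngram_size=2, early_stopping=true) into the checkpoint, so transformers will pick them up automatically when generating. A minimal loading sketch, assuming the files in this commit are checked out to a local directory; the path and the input text below are placeholders, not part of this commit:

    import torch
    from transformers import AutoTokenizer, BartForConditionalGeneration

    repo_dir = "./kobart-checkpoint"  # placeholder path to a local checkout of this repo

    # config.json selects BartForConditionalGeneration; pytorch_model.bin holds the float32 weights.
    model = BartForConditionalGeneration.from_pretrained(repo_dir)
    tokenizer = AutoTokenizer.from_pretrained(repo_dir)  # PreTrainedTokenizerFast from tokenizer.json

    text = "..."  # hypothetical Korean input to summarize
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)

    # Generation defaults (num_beams, length_penalty, min/max_length, no_repeat_ngram_size,
    # early_stopping) come from config.json and apply here without extra arguments.
    with torch.no_grad():
        summary_ids = model.generate(**inputs)

    print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))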
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:64427d03c725b672bb3aac504837bc6142fe9d208225322f336a77b94a22cd62
+ size 495648413
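What Git actually versions for pytorch_model.bin is the three-line LFS pointer above (spec version, sha256 oid, byte size); the ~495 MB weights live in LFS storage. A small sketch of reading such a pointer, assuming the checked-out file still contains the pointer text (e.g. the clone was made with GIT_LFS_SKIP_SMUDGE=1); the helper name is mine, not part of the repo:

    # Parse a Git LFS pointer file ("version" / "oid" / "size" lines, one key-value pair per line).
    def read_lfs_pointer(path: str) -> dict:
        fields = {}
        with open(path, encoding="utf-8") as fh:
            for line in fh:
                key, _, value = line.strip().partition(" ")
                fields[key] = value
        return fields

    pointer = read_lfs_pointer("pytorch_model.bin")  # any LFS-tracked file in this commit has the same layout
    print(pointer["oid"], pointer["size"])  # sha256:<hash> and the real file size in bytes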
runs/Feb01_01-56-33_05ae2094ef53/1675216623.5902588/events.out.tfevents.1675216623.05ae2094ef53.1700.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aef07524e49ff5b6a2a63b8ecf74b1514beaf5e0ad1ea2429f4f9a980226a12b
+ size 5800
runs/Feb01_01-56-33_05ae2094ef53/events.out.tfevents.1675216623.05ae2094ef53.1700.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:262cec7ee4e21ef763721c96c3814cac786d2eacdafba8185038ac43b9ac4989
+ size 6346
runs/Feb01_02-23-01_05ae2094ef53/1675218219.7537491/events.out.tfevents.1675218219.05ae2094ef53.2212.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68715817ffef58b306a6b2eab9ddb0b18544febfb937f92e919d46190f51f795
+ size 5800
runs/Feb01_02-23-01_05ae2094ef53/events.out.tfevents.1675218219.05ae2094ef53.2212.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4dd5d2914302b8b3dd95ca7fea5266c9c192c37d030218cb02e67de7b0e72ca1
+ size 7327
runs/Feb01_02-50-10_05ae2094ef53/1675219836.990371/events.out.tfevents.1675219836.05ae2094ef53.3026.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5cf001442ceeb708de253fc97443f81b2b7f0dd30416b58d325e20f352d55dc
+ size 5800
runs/Feb01_02-50-10_05ae2094ef53/events.out.tfevents.1675219836.05ae2094ef53.3026.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e74d2d000b9419a7a5a8b5cbc7e422088bdcb57986e37f6d39e7f777a188e5d
+ size 6345
runs/Feb01_03-20-31_05ae2094ef53/1675221658.471225/events.out.tfevents.1675221658.05ae2094ef53.4182.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:deb779f20e1263f392c34a2361a0675371e6d55e3ad509c66667898b3c4faedc
+ size 5800
runs/Feb01_03-20-31_05ae2094ef53/events.out.tfevents.1675221658.05ae2094ef53.4182.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8544155c3665d9921f8dd6ef3de7d0af1d99df9de5d6d8ef8ec13e9eb6a8661b
+ size 9349
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "bos_token": "</s>",
+ "eos_token": "</s>",
+ "mask_token": "<mask>",
+ "pad_token": "<pad>",
+ "unk_token": "<unk>"
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:444f68abc3b798ce3f285f1afff88f634a15514d53c11b49d5f9f640a006af03
+ size 1049337
tokenizer_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "max_length": 512,
+ "model_max_length": 1000000000000000019884624838656,
+ "name_or_path": "gogamza/kobart-base-v2",
+ "special_tokens_map_file": "/opt/ml/.cache/huggingface/hub/models--gogamza--kobart-base-v2/snapshots/f9f2ec35d3c32a1ecc7a3281f9626b7ec1913fed/special_tokens_map.json",
+ "tokenizer_class": "PreTrainedTokenizerFast"
+ }
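In tokenizer_config.json the huge model_max_length value is the transformers "unset" sentinel, so callers typically cap lengths explicitly when encoding, and special_tokens_map.json maps bos/eos to "</s>", pad to "<pad>", unk to "<unk>" and mask to "<mask>". A minimal sketch of loading the tokenizer from these files; the path and sample sentence are placeholders, not part of this commit:

    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("./kobart-checkpoint")  # placeholder local checkout path

    # Special tokens come from special_tokens_map.json; tokenizer.json provides the fast tokenizer itself.
    print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.pad_token)

    # model_max_length is effectively unset, so pass an explicit max_length with truncation.
    batch = tokenizer(
        ["요약할 한국어 문장입니다."],  # hypothetical Korean input
        truncation=True, max_length=512, padding=True, return_tensors="pt",
    )
    print(batch["input_ids"].shape)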
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:075e54844d95624a724829b3cd8639bd91f721a603eb11b396040cc54f444ef1
+ size 3643