wujohns committed
Commit b4a149d · 1 Parent(s): e4fe2f0

Upload config

Files changed (1)
  1. config.json +12 -11
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/root/demos/gpt2-chitchat/model_epoch40_50w",
+  "_name_or_path": "model/epoch29",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
@@ -8,21 +8,16 @@
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
-  "id2label": {
-    "0": "LABEL_0"
-  },
+  "gradient_checkpointing": false,
   "initializer_range": 0.02,
-  "label2id": {
-    "LABEL_0": 0
-  },
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
-  "n_ctx": 300,
+  "n_ctx": 1024,
   "n_embd": 768,
   "n_head": 12,
   "n_inner": null,
-  "n_layer": 10,
-  "n_positions": 300,
+  "n_layer": 12,
+  "n_positions": 1024,
   "output_past": true,
   "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
@@ -33,7 +28,13 @@
   "summary_proj_to_labels": true,
   "summary_type": "cls_index",
   "summary_use_proj": true,
-  "torch_dtype": "float32",
+  "task_specific_params": {
+    "text-generation": {
+      "do_sample": true,
+      "max_length": 400
+    }
+  },
+  "tokenizer_class": "BertTokenizer",
   "transformers_version": "4.26.1",
   "use_cache": true,
   "vocab_size": 13317