{
  "bos_token_id": 1,
  "do_sample": true,
  "eos_token_id": 2,
  "max_length": 4096,
  "max_memory": {
    "0": 19406892448,
    "1": 19406892448,
    "2": 19406892448,
    "3": 19406892448,
    "4": 19406892448,
    "5": 19406892448,
    "6": 19406892448,
    "7": 21474836480
  },
  "no_split_module_classes": [
    "LlamaDecoderLayer"
  ],
  "pad_token_id": 0,
  "special_dtypes": {},
  "temperature": 0.6,
  "top_p": 0.9,
  "transformers_version": "4.31.0"
}