Phimabri committed
Commit cbf13a6
1 Parent(s): 37e9a14

Trained with Unsloth

Upload model trained with Unsloth 2x faster

Files changed (4)
  1. README.md +1 -0
  2. config.json +21 -118
  3. generation_config.json +7 -0
  4. model.safetensors +3 -0
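
These four files are everything `transformers` needs to serve the checkpoint directly from the Hub. A minimal loading sketch follows; the repo id is a placeholder, since the actual repository name is not shown in this commit view:

```python
# Hypothetical loading sketch; substitute the real repository id for the placeholder.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "Phimabri/phi-3-mini-sft"  # placeholder repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" in config.json
    device_map="auto",          # the embedded quantization_config loads the weights in 4-bit
)

prompt = "<|user|>\nHello!<|end|>\n<|assistant|>\n"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=32)[0]))
```
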
README.md CHANGED
@@ -8,6 +8,7 @@ tags:
 - unsloth
 - mistral
 - trl
+- sft
 base_model: unsloth/Phi-3-mini-4k-instruct-bnb-4bit
 ---
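
The new `sft` tag, next to the existing `unsloth` and `trl` tags and the `base_model` entry, is the tag TRL's `SFTTrainer` adds when a supervised fine-tune is pushed to the Hub. A hypothetical sketch of the usual Unsloth + TRL flow that produces this metadata; the dataset, LoRA settings, and hyperparameters below are illustrative, not the author's actual training script:

```python
# Illustrative only: not the training script behind this commit.
from datasets import Dataset
from transformers import TrainingArguments
from trl import SFTTrainer
from unsloth import FastLanguageModel

# Load the 4-bit base model named in the README's base_model field.
model, tokenizer = FastLanguageModel.from_pretrained(
    model_name="unsloth/Phi-3-mini-4k-instruct-bnb-4bit",
    max_seq_length=4096,
    load_in_4bit=True,
)

# Attach LoRA adapters (hypothetical settings).
model = FastLanguageModel.get_peft_model(
    model,
    r=16,
    lora_alpha=16,
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj", "gate_proj", "up_proj", "down_proj"],
)

# Tiny in-memory dataset as a stand-in for the real (unknown) training data.
train_dataset = Dataset.from_dict(
    {"text": ["<|user|>\nSay hello.<|end|>\n<|assistant|>\nHello!<|end|>"]}
)

trainer = SFTTrainer(
    model=model,
    tokenizer=tokenizer,
    train_dataset=train_dataset,
    dataset_text_field="text",
    max_seq_length=4096,
    args=TrainingArguments(output_dir="outputs", per_device_train_batch_size=1, max_steps=10),
)
trainer.train()
trainer.push_to_hub()  # typically adds the trl/sft tags to the pushed model card
```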
 
config.json CHANGED
@@ -1,137 +1,40 @@
 {
-  "_name_or_path": "Phi-3-mini-128k-instruct",
+  "_name_or_path": "unsloth/Phi-3-mini-4k-instruct-bnb-4bit",
   "architectures": [
-    "Phi3ForCausalLM"
+    "MistralForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
-  },
   "bos_token_id": 1,
-  "embd_pdrop": 0.0,
   "eos_token_id": 32000,
   "hidden_act": "silu",
   "hidden_size": 3072,
   "initializer_range": 0.02,
   "intermediate_size": 8192,
-  "max_position_embeddings": 131072,
-  "model_type": "phi3",
+  "max_position_embeddings": 4096,
+  "model_type": "mistral",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 32,
-  "original_max_position_embeddings": 4096,
   "pad_token_id": 32000,
-  "resid_pdrop": 0.0,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "long_factor": [
-      1.0299999713897705,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0799999237060547,
-      1.2299998998641968,
-      1.2299998998641968,
-      1.2999999523162842,
-      1.4499999284744263,
-      1.5999999046325684,
-      1.6499998569488525,
-      1.8999998569488525,
-      2.859999895095825,
-      3.68999981880188,
-      5.419999599456787,
-      5.489999771118164,
-      5.489999771118164,
-      9.09000015258789,
-      11.579999923706055,
-      15.65999984741211,
-      15.769999504089355,
-      15.789999961853027,
-      18.360000610351562,
-      21.989999771118164,
-      23.079999923706055,
-      30.009998321533203,
-      32.35000228881836,
-      32.590003967285156,
-      35.56000518798828,
-      39.95000457763672,
-      53.840003967285156,
-      56.20000457763672,
-      57.95000457763672,
-      59.29000473022461,
-      59.77000427246094,
-      59.920005798339844,
-      61.190006256103516,
-      61.96000671386719,
-      62.50000762939453,
-      63.3700065612793,
-      63.48000717163086,
-      63.48000717163086,
-      63.66000747680664,
-      63.850006103515625,
-      64.08000946044922,
-      64.760009765625,
-      64.80001068115234,
-      64.81001281738281,
-      64.81001281738281
-    ],
-    "short_factor": [
-      1.05,
-      1.05,
-      1.05,
-      1.1,
-      1.1,
-      1.1500000000000001,
-      1.2000000000000002,
-      1.2500000000000002,
-      1.3000000000000003,
-      1.3500000000000003,
-      1.5000000000000004,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.000000000000001,
-      2.0500000000000007,
-      2.0500000000000007,
-      2.0500000000000007,
-      2.1000000000000005,
-      2.1000000000000005,
-      2.1000000000000005,
-      2.1500000000000004,
-      2.1500000000000004,
-      2.3499999999999996,
-      2.549999999999999,
-      2.5999999999999988,
-      2.5999999999999988,
-      2.7499999999999982,
-      2.849999999999998,
-      2.849999999999998,
-      2.9499999999999975
-    ],
-    "type": "su"
+  "quantization_config": {
+    "bnb_4bit_compute_dtype": "float16",
+    "bnb_4bit_quant_type": "nf4",
+    "bnb_4bit_use_double_quant": true,
+    "llm_int8_enable_fp32_cpu_offload": false,
+    "llm_int8_has_fp16_weight": false,
+    "llm_int8_skip_modules": null,
+    "llm_int8_threshold": 6.0,
+    "load_in_4bit": true,
+    "load_in_8bit": false,
+    "quant_method": "bitsandbytes"
   },
+  "rms_norm_eps": 1e-05,
   "rope_theta": 10000.0,
-  "sliding_window": 262144,
+  "sliding_window": 2048,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.39.3",
+  "torch_dtype": "float16",
+  "transformers_version": "4.40.2",
+  "unsloth_version": "2024.5",
   "use_cache": true,
   "vocab_size": 32064
 }
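
The replacement config drops the Phi-3 long-context settings (the `phi3` model type and 128k rope scaling) in favor of a Mistral-compatible 4k config, and embeds a bitsandbytes 4-bit `quantization_config`. For reference, that block is equivalent to the following `BitsAndBytesConfig`, which `from_pretrained` reconstructs automatically from config.json:

```python
# Equivalent of the "quantization_config" block above; shown only to make the
# embedded settings explicit, since transformers builds this object itself.
import torch
from transformers import BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                     # "load_in_4bit": true
    bnb_4bit_quant_type="nf4",             # "bnb_4bit_quant_type": "nf4"
    bnb_4bit_use_double_quant=True,        # "bnb_4bit_use_double_quant": true
    bnb_4bit_compute_dtype=torch.float16,  # "bnb_4bit_compute_dtype": "float16"
)
```
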
generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 32000,
+  "pad_token_id": 32000,
+  "transformers_version": "4.40.2"
+}
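
generation_config.json holds the default special-token ids that `generate()` falls back on when the caller does not override them. The file added here corresponds to this `GenerationConfig`:

```python
# GenerationConfig mirroring the new generation_config.json; transformers
# loads these defaults automatically from the repo.
from transformers import GenerationConfig

gen_config = GenerationConfig(
    bos_token_id=1,      # beginning-of-sequence token
    eos_token_id=32000,  # end-of-sequence token id in this vocabulary
    pad_token_id=32000,  # padding reuses the eos token id
)
```
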
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99310e83b03a0e394e503026b3f9747bfd54c155e2811d1d0f417240755a8419
+size 2264298173
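
The diff for model.safetensors adds only the Git LFS pointer; the roughly 2.26 GB weight file itself lives in LFS storage, addressed by the sha256 oid and byte size recorded above. A small sketch for checking a downloaded copy against the pointer (the local path is an assumption):

```python
# Verify a downloaded model.safetensors against the LFS pointer above.
import hashlib
from pathlib import Path

path = Path("model.safetensors")  # hypothetical local copy; adjust as needed
expected_oid = "99310e83b03a0e394e503026b3f9747bfd54c155e2811d1d0f417240755a8419"
expected_size = 2264298173  # bytes

digest = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert path.stat().st_size == expected_size, "size mismatch"
assert digest.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```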