ybelkada committed
Commit 0a25285
1 Parent(s): 4d4ccd9

Upload MptForCausalLM

config.json CHANGED
@@ -1,5 +1,9 @@
 {
-  "_commit_hash": null,
+  "_commit_hash": "4d4ccd9ff46c5df497ab1ecabd9efd15e848fb69",
+  "_name_or_path": "ybelkada/mpt-7b-transformers",
+  "architectures": [
+    "MptForCausalLM"
+  ],
   "attn_config": {
     "_name_or_path": "",
     "add_cross_attention": false,
@@ -162,6 +166,7 @@
   "no_bias": false,
   "norm_type": "low_precision_layernorm",
   "resid_pdrop": 0.0,
+  "torch_dtype": "bfloat16",
   "transformers_version": null,
   "use_cache": false,
   "verbose": 0,
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bd7ac49df499e6b0ac130d779b115757a066605ba76d184353545793f5c918a5
-size 9943042874
+oid sha256:0e721912a23fe4be3f8ec1de2de1e2106646d3ac3d877fe8a27b8643b5a28bde
+size 9943040826
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3a44a6a1de81702a1f7900e11b180968977ca7072a93ebaa3a442768dd9f9c83
-size 3355600302
+oid sha256:e08ea69c89f09d611f9154bcab2765ccf282cbc3e3f49d6bc549c71a2cd80e14
+size 3355599662
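Both shard entries are Git LFS pointers: only the sha256 oid and byte size change in the repo, while the actual weights live in LFS storage. A small sketch, assuming the new shard has been downloaded locally under the same file name, of checking it against the oid and size recorded in the pointer above:

```python
# Sketch: verify a locally downloaded shard against the sha256 oid and byte
# size recorded in its Git LFS pointer (values copied from the diff above).
import hashlib
import os

def verify_lfs_shard(path, expected_oid, expected_size):
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        # Hash in 1 MiB chunks so a ~10 GB shard is never fully held in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

print(verify_lfs_shard(
    "pytorch_model-00001-of-00002.bin",
    "0e721912a23fe4be3f8ec1de2de1e2106646d3ac3d877fe8a27b8643b5a28bde",
    9943040826,
))
```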