dsikka committed
Commit 8bd4c1d
1 parent: bfcf63a

Upload folder using huggingface_hub

Files changed (4):
  1. config.json +11 -3
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
  4. recipe.yaml +6 -0
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/home/runner/.cache/huggingface/hub/models--TinyLlama--TinyLlama-1.1B-Chat-v1.0/snapshots/fe8a4ea1ffedaf415f4da2f062534de366a451e6",
+  "_name_or_path": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -48,14 +48,22 @@
     ],
     "kv_cache_scheme": null,
     "quant_method": "compressed-tensors",
-    "quantization_status": "compressed"
+    "quantization_status": "compressed",
+    "sparsity_config": {
+      "format": "dense",
+      "global_sparsity": 0.14375328644374685,
+      "ignore": [],
+      "registry_requires_subclass": false,
+      "sparsity_structure": "unstructured",
+      "targets": []
+    }
   },
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.46.2",
+  "transformers_version": "4.46.3",
   "use_cache": true,
   "vocab_size": 32000
 }
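
The updated quantization_config follows the compressed-tensors format: quant_method "compressed-tensors", a "compressed" quantization_status, and a new sparsity_config recording roughly 14% unstructured global sparsity stored in dense format. Below is a minimal sketch of loading such a checkpoint with transformers; the repo path is a placeholder rather than the actual model id, and the compressed-tensors package is assumed to be installed.

# Minimal sketch: loading a compressed-tensors W4A16 checkpoint with transformers.
# "path/to/this-repo" is a placeholder for wherever this folder lives
# (a local directory or a Hub repo id); it is not the actual repo name.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "path/to/this-repo"  # placeholder
tokenizer = AutoTokenizer.from_pretrained(model_id)

# Recent transformers releases recognize quant_method "compressed-tensors"
# in config.json and load the packed int4 weights (requires the
# compressed-tensors package).
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype="auto", device_map="auto"
)

inputs = tokenizer("Hello", return_tensors="pt").to(model.device)
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=20)[0]))
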
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "eos_token_id": 2,
   "max_length": 2048,
   "pad_token_id": 0,
-  "transformers_version": "4.46.2"
+  "transformers_version": "4.46.3"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8f5735b95b606742c6d70f5d9631d39ebaba8449d1e53404bec2dd597a713d61
+oid sha256:892873b79abcf66508b526a74179177e303de9b4b1464a6177096348faa7fd76
 size 761968800
recipe.yaml ADDED
@@ -0,0 +1,6 @@
+DEFAULT_stage:
+  DEFAULT_modifiers:
+    GPTQModifier:
+      targets: Linear
+      ignore: [lm_head]
+      scheme: W4A16
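
The added recipe.yaml matches llm-compressor's one-shot GPTQ flow: every Linear layer except lm_head is quantized to the W4A16 scheme (4-bit weights, 16-bit activations). A minimal sketch of how such a recipe is typically applied follows; the calibration dataset, sample count, and sequence length are illustrative assumptions, not values recorded in this commit.

# Minimal sketch of applying a GPTQ W4A16 recipe with llm-compressor.
# Only the modifier mirrors recipe.yaml; the dataset and calibration
# settings below are assumed for illustration.
from llmcompressor.modifiers.quantization import GPTQModifier
from llmcompressor.transformers import oneshot

recipe = GPTQModifier(targets="Linear", ignore=["lm_head"], scheme="W4A16")

oneshot(
    model="TinyLlama/TinyLlama-1.1B-Chat-v1.0",
    dataset="open_platypus",          # assumed calibration set
    recipe=recipe,
    max_seq_length=2048,              # assumed
    num_calibration_samples=512,      # assumed
    output_dir="TinyLlama-1.1B-Chat-v1.0-W4A16",  # assumed output path
)
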