Update README.md
README.md CHANGED
@@ -1,33 +1,3 @@
[Removed from the head of the file: the old minimal front matter (`license: apache-2.0`) and the GGUF model card sections that previously sat ahead of the YAML front matter. The same sections are re-added after the front matter in the hunk below.]
@@ -149,6 +119,32 @@ model-index:
      url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard?query=mlabonne/NeuralHermes-2.5-Mistral-7B
      name: Open LLM Leaderboard
---

# NeuralHermes-2.5-Mistral-7B

## Description

This repo contains GGUF-format model files for NeuralHermes-2.5-Mistral-7B.
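The files run in any GGUF-compatible runtime (llama.cpp, llama-cpp-python, text-generation-webui, and similar). As a minimal sketch, assuming llama-cpp-python is installed and the Q4_K_M file from the table below has been downloaded locally:

```python
from llama_cpp import Llama

# Load one of the quantized files listed under "Files Provided".
llm = Llama(
    model_path="neuralhermes-2.5-mistral-7b.Q4_K_M.gguf",
    n_ctx=4096,       # context window to allocate for this run
    n_gpu_layers=-1,  # offload all layers if built with GPU support; 0 for CPU only
)

# create_chat_completion uses the chat template carried in the GGUF metadata
# (in recent llama-cpp-python versions), so no manual prompt formatting is needed.
out = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Summarize what a GGUF file is in one sentence."},
    ],
    max_tokens=128,
)
print(out["choices"][0]["message"]["content"])
```

The file name, context size, and GPU offload above are per-setup choices, not fixed by this repo.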

## Files Provided

| Name                                    | Quant  | Bits | File Size | Remark                           |
| --------------------------------------- | ------ | ---- | --------- | -------------------------------- |
| neuralhermes-2.5-mistral-7b.IQ3_S.gguf  | IQ3_S  | 3    | 3.18 GB   | 3.44 bpw quantization            |
| neuralhermes-2.5-mistral-7b.IQ3_M.gguf  | IQ3_M  | 3    | 3.28 GB   | 3.66 bpw quantization mix        |
| neuralhermes-2.5-mistral-7b.Q4_0.gguf   | Q4_0   | 4    | 4.11 GB   | 3.56G, +0.2166 ppl               |
| neuralhermes-2.5-mistral-7b.IQ4_NL.gguf | IQ4_NL | 4    | 4.16 GB   | 4.25 bpw non-linear quantization |
| neuralhermes-2.5-mistral-7b.Q4_K_M.gguf | Q4_K_M | 4    | 4.37 GB   | 3.80G, +0.0532 ppl               |
| neuralhermes-2.5-mistral-7b.Q5_K_M.gguf | Q5_K_M | 5    | 5.13 GB   | 4.45G, +0.0122 ppl               |
| neuralhermes-2.5-mistral-7b.Q6_K.gguf   | Q6_K   | 6    | 5.94 GB   | 5.15G, +0.0008 ppl               |
| neuralhermes-2.5-mistral-7b.Q8_0.gguf   | Q8_0   | 8    | 7.70 GB   | 6.70G, +0.0004 ppl               |
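To pull a single quantized file rather than cloning the whole repository, the Hugging Face Hub client can download by file name. A sketch, with the repo id left as a placeholder for this repository:

```python
from huggingface_hub import hf_hub_download

# "<this-repo-id>" is a placeholder -- substitute the id of this repository.
model_path = hf_hub_download(
    repo_id="<this-repo-id>",
    filename="neuralhermes-2.5-mistral-7b.Q4_K_M.gguf",  # any name from the table above
)
print(model_path)  # local cache path, usable directly as model_path in llama.cpp bindings
```

As a rough rule, Q4_K_M is the usual size/quality balance, Q5_K_M and Q6_K trade more disk and RAM for smaller perplexity loss, and Q8_0 is close to lossless; the ppl deltas in the Remark column appear to be llama.cpp's generic per-quant-type figures rather than measurements on this model.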

## Parameters

| path                              | type    | architecture       | rope_theta | sliding_window | max_position_embeddings |
| --------------------------------- | ------- | ------------------ | ---------- | -------------- | ----------------------- |
| teknium/OpenHermes-2.5-Mistral-7B | mistral | MistralForCausalLM | 10000      | 4096           | 32768                   |
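These values come from the source model's config and are also embedded in the GGUF metadata, so runtimes normally pick them up automatically. Purely as an illustration of how the table maps onto llama-cpp-python's load-time parameters (set them only if you need to override the stored defaults):

```python
from llama_cpp import Llama

llm = Llama(
    model_path="neuralhermes-2.5-mistral-7b.Q5_K_M.gguf",
    n_ctx=8192,              # per-run context; anything up to max_position_embeddings (32768)
    rope_freq_base=10000.0,  # corresponds to rope_theta above; 0 keeps the value stored in the GGUF
)
```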

## Benchmarks

![](https://i.ibb.co/N2kwGJY/Neural-Hermes-2-5-Mistral-7-B.png)

# Original Model Card

<center><img src="https://i.imgur.com/qIhaFNM.png"></center>