Isaak Carter Augustus committed
Commit: bb36761
Parent(s): b22202e

Update README.md

README.md CHANGED
@@ -1,7 +1,4 @@
 ---
-license: other
-license_name: gml
-license_link: https://github.com/OpenBMB/General-Model-License/blob/main/%E9%80%9A%E7%94%A8%E6%A8%A1%E5%9E%8B%E8%AE%B8%E5%8F%AF%E5%8D%8F%E8%AE%AE-%E6%9D%A5%E6%BA%90%E8%AF%B4%E6%98%8E-%E5%AE%A3%E4%BC%A0%E9%99%90%E5%88%B6-%E5%95%86%E4%B8%9A%E6%8E%88%E6%9D%83.md
 language:
 - en
 - zh
@@ -9,8 +6,22 @@ tags:
 - MiniCPM
 - ModelBest
 - THUNLP
+- mlx
+library_name: mlx
 ---
 
+# Important:
+Made a separate repo because the original creators didn't merge the safetensors-variant commit from the Hugging Face team, and because the model_type property is missing from the config (which my MiniCPM implementation in mlx-examples requires). Both issues are fixed here.
+
+To use in mlx:
+
+```sh
+python -m mlx_lm.generate --model Isaak-Carter/MiniCPM-2B-dpo-fp32-safetensors --prompt "hello"
+```
+
+
+
+
 
 <div align="center">
 <h1>
@@ -98,7 +109,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 torch.manual_seed(0)
 
-path = 'openbmb/MiniCPM-2B-
+path = 'openbmb/MiniCPM-2B-dpo-fp32'
 tokenizer = AutoTokenizer.from_pretrained(path)
 model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.float32, device_map='cuda', trust_remote_code=True)
 
@@ -151,4 +162,4 @@ print(responds)
 booktitle={OpenBMB Blog},
 year={2024}
 }
-```
+```
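A note on the model_type fix mentioned in the added README text: mlx-lm picks its model implementation from the model_type field in config.json, so the repair amounts to a one-line config addition. The sketch below is hypothetical; the value "minicpm" is an assumption based on the implementation name and is not taken from this commit.

```python
# Hypothetical illustration of the config fix described above: add a
# "model_type" entry so mlx-lm can route to its MiniCPM implementation.
# The value "minicpm" is an assumption, not something taken from this commit.
import json

with open("config.json") as f:
    cfg = json.load(f)

cfg.setdefault("model_type", "minicpm")

with open("config.json", "w") as f:
    json.dump(cfg, f, indent=2)
```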
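Besides the CLI call shown in the diff, the model can also be driven from Python through mlx-lm's library API. The snippet below is a minimal sketch, assuming the load/generate helpers that mlx-lm exports from its top-level package and reusing the repo id from the README; exact argument names may differ between mlx-lm versions.

```python
# Sketch only: assumes mlx-lm's top-level load/generate helpers;
# argument names can differ between mlx-lm versions.
from mlx_lm import load, generate

# Same repo id as the CLI example in the README.
model, tokenizer = load("Isaak-Carter/MiniCPM-2B-dpo-fp32-safetensors")

# Produce a short completion for a toy prompt.
text = generate(model, tokenizer, prompt="hello", max_tokens=100)
print(text)
```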