Update README.md
Browse files
README.md
CHANGED
@@ -58,7 +58,7 @@ Just like any Huggingface model, just run it using the transformers library:
 58   ```python
 59   # pip install transformers
 60   from transformers import AutoModelForCausalLM, AutoTokenizer
-61   checkpoint = "SultanR/SmolTulu-
+61   checkpoint = "SultanR/SmolTulu-1.7b-Instruct"
 62   device = "cuda" # for GPU usage or "cpu" for CPU usage
 63   tokenizer = AutoTokenizer.from_pretrained(checkpoint)
 64   # for multiple GPUs install accelerate and do `model = AutoModelForCausalLM.from_pretrained(checkpoint, device_map="auto")`