Update README.md
README.md CHANGED
@@ -17,6 +17,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, set_seed
 
 model = AutoModelForCausalLM.from_pretrained("sbintuitions/sarashina1-65b", torch_dtype=torch.float16, device_map="auto")
 tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina1-65b")
+# If you want to use slow tokenizer
+# tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina1-65b", use_fast=False, revision="slow-tokenizer")
 generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
 set_seed(123)
 
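For reference, a minimal end-to-end sketch of the README snippet as it reads after this change. The imports follow the hunk header above; the final prompt and sampling settings (max_length, do_sample) are illustrative assumptions and are not part of this diff.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline, set_seed

# Load the 65B model in half precision, sharded across available devices
model = AutoModelForCausalLM.from_pretrained("sbintuitions/sarashina1-65b", torch_dtype=torch.float16, device_map="auto")
tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina1-65b")
# If you want to use slow tokenizer
# tokenizer = AutoTokenizer.from_pretrained("sbintuitions/sarashina1-65b", use_fast=False, revision="slow-tokenizer")

generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
set_seed(123)

# Illustrative call; the prompt text and generation arguments are assumptions, not taken from the README
print(generator("おはようございます", max_length=30, do_sample=True))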