---
license: apache-2.0
---

Note: use the original OpenLLaMA tokenizer (`openlm-research/open_llama_7b`) with this model, as shown in the snippet below.


```python
import torch
from transformers import LlamaForCausalLM, LlamaTokenizer

model_path = 'ruwan/open-llama-sharded-1GB-7B-alpaca-vmware'

# Load the original OpenLLaMA tokenizer rather than one from this repo.
tokenizer = LlamaTokenizer.from_pretrained("openlm-research/open_llama_7b")

# Load the sharded fp16 weights, placing them across available devices.
model = LlamaForCausalLM.from_pretrained(
    model_path, torch_dtype=torch.float16, device_map='auto'
)
```
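
The snippet below is a minimal generation sketch, not part of the original card: it uses the standard `transformers` `generate` API, and the prompt follows the common Alpaca instruction template, which this fine-tune is assumed to expect; adjust the prompt format if the upstream training recipe differs.

```python
# Hypothetical example prompt in the Alpaca instruction format (assumption).
prompt = (
    "Below is an instruction that describes a task. "
    "Write a response that appropriately completes the request.\n\n"
    "### Instruction:\nWhat is the capital of France?\n\n### Response:\n"
)

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output_ids = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```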