Update README.md
README.md CHANGED
@@ -14,19 +14,13 @@ import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 
 torch_dtype = torch.bfloat16 if torch.cuda.is_available() and hasattr(torch.cuda, "is_bf16_supported") and torch.cuda.is_bf16_supported() else torch.float16
-
 model = AutoModelForCausalLM.from_pretrained("Calvin-Xu/FLFL", device_map="auto", torch_dtype=torch_dtype)
 tokenizer = AutoTokenizer.from_pretrained("Calvin-Xu/FLFL")
 
 prompt_template = """[INST] {instruction}\n{input}\n[/INST]\n"""
 sentence = "国境の長いトンネルを抜けると雪国であった"
 
-inputs = tokenizer(
-    prompt_template.format(
-        instruction="次の文に正確に振り仮名を付けてください", input=sentence
-    ),
-    return_tensors="pt",
-).to(model.device)
+inputs = tokenizer(prompt_template.format(instruction="次の文に正確に振り仮名を付けてください", input=sentence), return_tensors="pt").to(model.device)
 with torch.no_grad():
     tokens = model.generate(**inputs, max_new_tokens=512, do_sample=False)
 
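Before and after the change, the snippet ends at generation without showing the output. A minimal decoding sketch, not part of this commit: it assumes the usual transformers pattern of slicing off the prompt tokens before decoding, and `prompt_length` / the `print` line are illustrative additions.

```python
# Continuation of the README snippet above (uses inputs, tokens, tokenizer).
# Decode only the newly generated tokens, skipping the prompt and special tokens.
prompt_length = inputs["input_ids"].shape[1]          # number of prompt tokens
output = tokenizer.decode(tokens[0][prompt_length:], skip_special_tokens=True)
print(output)  # the input sentence annotated with furigana
```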