Calvin-Xu committed
Commit 6b1b639
1 Parent(s): 912bb58

Update README.md

Files changed (1): README.md +26 -0
README.md CHANGED
@@ -9,6 +9,32 @@ metrics:
  pipeline_tag: text2text-generation
  ---

+ ```python
+ import torch
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ torch_dtype = torch.bfloat16 if torch.cuda.is_available() and hasattr(torch.cuda, "is_bf16_supported") and torch.cuda.is_bf16_supported() else torch.float16
+
+ model = AutoModelForCausalLM.from_pretrained("Calvin-Xu/FLFL", device_map="auto", torch_dtype=torch_dtype)
+ tokenizer = AutoTokenizer.from_pretrained("Calvin-Xu/FLFL")
+
+ prompt_template = """[INST] {instruction}\n{input}\n[/INST]\n"""
+ sentence = "国境の長いトンネルを抜けると雪国であった"
+
+ inputs = tokenizer(
+     prompt_template.format(
+         instruction="次の文に正確に振り仮名を付けてください", input=sentence
+     ),
+     return_tensors="pt",
+ ).to(model.device)
+ with torch.no_grad():
+     tokens = model.generate(**inputs, max_new_tokens=512, do_sample=False)
+
+ output = tokenizer.decode(tokens[0], skip_special_tokens=False)
+ print(output)
+
+ ```
+
  ### Finetuned from
  [stockmark/gpt-neox-japanese-1.4b](https://huggingface.co/stockmark/gpt-neox-japanese-1.4b)
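
Not part of the committed README, but as a usage note: the snippet above prints the full decoded sequence, including the echoed prompt and any special tokens. To show only the model's completion (the furigana-annotated text the instruction asks for), a minimal sketch continuing from the variables above, assuming the usual causal-LM behavior where `generate()` returns the prompt tokens followed by the newly generated ones:

```python
# Minimal sketch, not from the original README: drop the echoed prompt and
# special tokens so only the newly generated text remains.
prompt_len = inputs["input_ids"].shape[-1]
completion = tokenizer.decode(tokens[0][prompt_len:], skip_special_tokens=True)
print(completion)
```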