AkimfromParis committed on
Commit
2c70442
1 Parent(s): d288799

Update generated text

Files changed (1)
  1. README.md +12 -10
README.md CHANGED
@@ -43,20 +43,22 @@ dtype: bfloat16
  ## 🤗 Usage for HuggingFace
  
  ```python
- !pip install -qU transformers accelerate
- 
- from transformers import AutoModelForCausalLM, AutoTokenizer
- 
- model_path = "AkimfromParis/Neroli-Rak-Lig-slerp-7B"
- tokenizer = AutoTokenizer.from_pretrained(model_path)
- model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype="auto", device_map="auto")
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+ from transformers import pipeline
+ import torch
+ 
+ model_name = "AkimfromParis/Neroli-Rak-Lig-slerp-7B"
+ 
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16)
+ 
+ pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, pad_token_id=tokenizer.eos_token_id)
  
  messages = [
- {"role": "system", "content": "あなたはAIアシスタントです。"},
- {"role": "user", "content": "大谷翔平選手について教えてください"}
- ]
- gen_input = tokenizer.apply_chat_template(messages, return_tensors="pt")
- model.generate(**gen_input)
+ {"role": "system","content": "あなたは誠実で優秀な日本人のアシスタントです。以下のトピックに関する詳細な情報を提供してください。"},
+ {"role": "user", "content": "大谷翔平選手は誰ですか?"},
+ ]
+ print(pipe(messages, max_new_tokens=512)[0]['generated_text'][-1])
  
  ```
  
  # 🔖 Citation
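For reference, the removed example called `model.generate` without ever decoding the output. Below is a minimal sketch (not part of the commit) of the equivalent manual path, assuming the tokenizer ships a chat template (as the removed `apply_chat_template` call implies) and reusing the prompt, `device_map="auto"`, and `max_new_tokens=512` from the README examples:

```python
# Sketch: manual chat-template generation that the removed example left unfinished.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "AkimfromParis/Neroli-Rak-Lig-slerp-7B"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16, device_map="auto")

messages = [
    {"role": "system", "content": "あなたは誠実で優秀な日本人のアシスタントです。以下のトピックに関する詳細な情報を提供してください。"},
    {"role": "user", "content": "大谷翔平選手は誰ですか?"},
]

# add_generation_prompt=True appends the assistant turn marker expected by the chat template
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

output_ids = model.generate(input_ids, max_new_tokens=512, pad_token_id=tokenizer.eos_token_id)

# Decode only the newly generated continuation, skipping the prompt tokens
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))
```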