togepi55 committed
Commit
27ef88b
1 Parent(s): 941cf6b

Update README.md

Files changed (1): README.md (+8 -2)
README.md CHANGED
@@ -38,10 +38,12 @@ from transformers import (
     AutoModelForCausalLM,
     BitsAndBytesConfig,
 )
+from peft import LoraConfig, PeftModel
 from transformers import TextStreamer
 
 
-BASE_MODEL = "togepi55/llm-jp-3-13b-it"
+BASE_MODEL = "llm-jp/llm-jp-3-13b"
+PEFT_MODEL = "togepi55/llm-jp-3-13b-it"
 
 tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)
 bnb_config = BitsAndBytesConfig(
@@ -51,7 +53,7 @@ bnb_config = BitsAndBytesConfig(
     bnb_4bit_use_double_quant=False,
 )
 
-model = AutoModelForCausalLM.from_pretrained(
+base_model = AutoModelForCausalLM.from_pretrained(
     BASE_MODEL,
     device_map="auto",
     quantization_config=bnb_config,
@@ -59,8 +61,12 @@ model = AutoModelForCausalLM.from_pretrained(
     trust_remote_code=True,
 )
 
+
+model = PeftModel.from_pretrained(base_model, PEFT_MODEL)
+
 streamer = TextStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
 
+
 instruction = "東京は日本の"
 
 prompt = f"<s>以下は、タスクを説明する指示です。要求を適切に満たす応答を書きなさい\n\n### 指示:\n{instruction}\n\n### 応答:\n"
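The diff above switches the README's usage example from loading togepi55/llm-jp-3-13b-it as a standalone checkpoint to loading the base model llm-jp/llm-jp-3-13b in 4-bit and attaching togepi55/llm-jp-3-13b-it as a PEFT adapter via PeftModel.from_pretrained, so no merged full-size checkpoint needs to be downloaded. For context, below is a minimal sketch of how generation could continue from the objects the updated snippet defines (model, tokenizer, streamer, prompt); the tokenization call, max_new_tokens, and decoding settings are assumptions for illustration and are not part of this commit.

# Minimal sketch, continuing from the README snippet above (model, tokenizer, streamer, prompt).
# The generation settings below are assumed for illustration; they are not in the commit.
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

output_ids = model.generate(
    **inputs,
    streamer=streamer,       # streams decoded tokens to stdout as they are generated
    max_new_tokens=256,      # assumed value
    do_sample=False,         # greedy decoding; assumed, not specified in the diff
)

# The streamer already prints the response incrementally; decode again if the full string is needed.
response = tokenizer.decode(output_ids[0], skip_special_tokens=True)
print(response)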