kiseich committed on
Commit
7adae94
1 Parent(s): 5031881
Files changed (1) hide show
  1. README.md +24 -24
README.md CHANGED
@@ -15,35 +15,35 @@ base_model:
15
 
16
  #推論時のコード
17
 
18
- !pip install -U bitsandbytes
19
- !pip install -U transformers
20
- !pip install -U accelerate
21
- !pip install -U datasets
22
- !pip install -U peft
23
- !pip install ipywidgets --upgrade
24
-
25
- from transformers import (
26
- AutoModelForCausalLM,
27
- AutoTokenizer,
28
- BitsAndBytesConfig,
29
- )
30
- from peft import PeftModel
31
- import torch
32
- from tqdm import tqdm
33
- import json
34
 
35
  # Hugging Faceで取得したTokenをこちらに貼る。
36
- HF_TOKEN = "YOUR_HF_TOKEN"
37
 
38
- model_id = "llm-jp/llm-jp-3-13b"
39
- adapter_id = "kiseich/llm-jp-3-13b-Etask"
40
 
41
  # QLoRA config
42
- bnb_config = BitsAndBytesConfig(
43
- load_in_4bit=True,
44
- bnb_4bit_quant_type="nf4",
45
- bnb_4bit_compute_dtype=torch.bfloat16,
46
- )
47
 
48
  # Load model
49
  model = AutoModelForCausalLM.from_pretrained(
 
15
 
16
  #推論時のコード
17
 
18
+ !pip install -U bitsandbytes
19
+ !pip install -U transformers
20
+ !pip install -U accelerate
21
+ !pip install -U datasets
22
+ !pip install -U peft
23
+ !pip install ipywidgets --upgrade
24
+
25
+ from transformers import (
26
+ AutoModelForCausalLM,
27
+ AutoTokenizer,
28
+ BitsAndBytesConfig,
29
+ )
30
+ from peft import PeftModel
31
+ import torch
32
+ from tqdm import tqdm
33
+ import json
34
 
35
  # Hugging Faceで取得したTokenをこちらに貼る。
36
+ HF_TOKEN = "YOUR_HF_TOKEN"
37
 
38
+ model_id = "llm-jp/llm-jp-3-13b"
39
+ adapter_id = "kiseich/llm-jp-3-13b-Etask"
40
 
41
  # QLoRA config
42
+ bnb_config = BitsAndBytesConfig(
43
+ load_in_4bit=True,
44
+ bnb_4bit_quant_type="nf4",
45
+ bnb_4bit_compute_dtype=torch.bfloat16,
46
+ )
47
 
48
  # Load model
49
  model = AutoModelForCausalLM.from_pretrained(