Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -1,12 +1,23 @@
|
|
1 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
2 |
from peft import LoraConfig, get_peft_model
|
3 |
import torch
|
4 |
-
|
5 |
-
|
|
|
|
|
|
|
|
|
6 |
print("step 1 ")
|
7 |
-
|
8 |
-
print("step 2
|
9 |
-
model =
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
10 |
def generator(text):
|
11 |
inputs = tokenizer(
|
12 |
[
|
|
|
1 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
2 |
from peft import LoraConfig, get_peft_model
|
3 |
import torch
|
4 |
+
|
5 |
+
|
6 |
+
from peft import PeftModel, PeftConfig
|
7 |
+
from transformers import AutoModelForCausalLM
|
8 |
+
|
9 |
+
# Load the LoRA adapter config, the 4-bit base model, the PEFT-adapted model,
# and the tokenizer. All of this runs once at app startup (module import).
config = PeftConfig.from_pretrained("youssef227/llama-3-8b-Instruct-bnb-telcom-3")
print("step 1 ")
base_model = AutoModelForCausalLM.from_pretrained("unsloth/llama-3-8b-Instruct-bnb-4bit")
print("step 2")
# Attach the fine-tuned LoRA adapter weights on top of the base model.
model = PeftModel.from_pretrained(base_model, "youssef227/llama-3-8b-Instruct-bnb-telcom-3")
print("step 3")
# FIX: generator() below calls tokenizer(...), but the tokenizer load was left
# commented out — every request died with NameError ('tokenizer' is not
# defined), which is the Space's "Runtime error". Load it from the adapter
# repo, as the original (commented) code intended.
tokenizer = AutoTokenizer.from_pretrained("youssef227/llama-3-8b-Instruct-bnb-telcom-3")
print("step 4")
|
21 |
def generator(text):
|
22 |
inputs = tokenizer(
|
23 |
[
|