pavankumarbalijepalli committed
Commit: 41f9f32
Parent(s): cfff31d
Modify Downstream Usage

README.md CHANGED
@@ -88,11 +88,12 @@ print(response['choices'][0]['text'].strip())
 <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
 
 ```python
+# USING ON GPU MACHINE
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 from peft import PeftModel, PeftConfig
 
-model_name = "
+model_name = "pavankumarbalijepalli/phi2-sqlcoder"
 
 model = AutoModelForCausalLM.from_pretrained(
     model_name,
@@ -101,12 +102,10 @@ model = AutoModelForCausalLM.from_pretrained(
 )
 
 tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
-inputs = tokenizer(
+inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
 inputs.to('cuda')
 
-
-trained_model = PeftModel.from_pretrained(model, model_id)
-outputs = trained_model.generate(**inputs, max_length=1000)
+outputs = model.generate(**inputs, max_length=1000)
 text = tokenizer.batch_decode(outputs,skip_special_tokens=True)[0]
 print(text)
 ```
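For reference, below is a minimal sketch of the downstream-usage snippet as it reads after this commit, assembled so it can run end to end. The prompt text, the pad-token guard, and the `from_pretrained` keyword arguments are assumptions, since the diff elides README lines 99-101 and never shows where `prompt` is defined; the now-unused `peft` import kept by the README is omitted here.

```python
# Sketch of the post-commit "Downstream Usage" snippet (GPU machine).
# Assumed pieces are marked in comments; everything else mirrors the diff.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "pavankumarbalijepalli/phi2-sqlcoder"

model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,  # assumed; the actual kwargs (README lines 99-101) are elided in the diff
    trust_remote_code=True,     # assumed, mirroring the tokenizer call
)
model = model.to("cuda")        # the snippet moves inputs to CUDA, so the model is placed there too

tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token  # guard: padding=True needs a pad token

# Hypothetical prompt; the README defines `prompt` elsewhere. Any text-to-SQL style
# instruction should work here.
prompt = "### Task\nGenerate a SQL query to count all users.\n\n### Answer\n"

inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True)
inputs = inputs.to("cuda")

outputs = model.generate(**inputs, max_length=1000)
text = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
print(text)
```

The main behavioral change in this commit is that the snippet no longer loads a separate PEFT adapter with `PeftModel.from_pretrained`; the repository id is loaded directly with `AutoModelForCausalLM` and `model.generate` is called on it, which suggests the adapter has been merged into the published checkpoint.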