ehristoforu committed
Commit • b827175
1 Parent(s): b3116aa
Update app.py
app.py CHANGED
@@ -2,7 +2,7 @@ import torch
 from PIL import Image
 import gradio as gr
 import spaces
-from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
+from transformers import AutoModelForCausalLM, AutoTokenizer, Qwen2Tokenizer, TextIteratorStreamer
 import os
 from threading import Thread

@@ -41,7 +41,7 @@ model = AutoModelForCausalLM.from_pretrained(
     torch_dtype=torch.float16,
     device_map="auto",
 )
-tokenizer =
+tokenizer = Qwen2Tokenizer.from_pretrained(MODELS)

 @spaces.GPU
 def stream_chat(message: str, history: list, temperature: float, max_new_tokens: int, top_p: float, top_k: int, penalty: float):
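
For context, a minimal sketch of how the pieces touched by this commit are commonly wired together: Qwen2Tokenizer.from_pretrained loads the tokenizer for the same checkpoint as the model, and TextIteratorStreamer plus a background Thread let stream_chat yield partial text to the Gradio UI. Only the import line, the tokenizer line, and the stream_chat signature come from the diff above; the MODELS value, the (user, assistant) history format, and the chat-template call are assumptions, not the Space's actual code.

# Hypothetical sketch; MODELS, the history format, and the chat-template
# usage are assumed, not taken from the Space's app.py.
import os
from threading import Thread

import torch
import spaces
from transformers import AutoModelForCausalLM, Qwen2Tokenizer, TextIteratorStreamer

# Assumed checkpoint id; in the real app MODELS is defined elsewhere in app.py.
MODELS = os.environ.get("MODELS", "Qwen/Qwen2-7B-Instruct")

model = AutoModelForCausalLM.from_pretrained(
    MODELS,
    torch_dtype=torch.float16,
    device_map="auto",
)
tokenizer = Qwen2Tokenizer.from_pretrained(MODELS)  # the line this commit adds


@spaces.GPU
def stream_chat(message: str, history: list, temperature: float, max_new_tokens: int, top_p: float, top_k: int, penalty: float):
    # Assumed Gradio-style history: a list of (user, assistant) pairs.
    conversation = []
    for user, assistant in history:
        conversation.extend([
            {"role": "user", "content": user},
            {"role": "assistant", "content": assistant},
        ])
    conversation.append({"role": "user", "content": message})

    input_ids = tokenizer.apply_chat_template(
        conversation, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)

    # TextIteratorStreamer yields decoded text as generate() produces tokens,
    # so generation runs in a background thread while the UI streams output.
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generate_kwargs = dict(
        input_ids=input_ids,
        streamer=streamer,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        top_p=top_p,
        top_k=top_k,
        repetition_penalty=penalty,
        do_sample=temperature > 0,
    )
    Thread(target=model.generate, kwargs=generate_kwargs).start()

    buffer = ""
    for new_text in streamer:
        buffer += new_text
        yield buffer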