bring back AutoModelForCausalLM
app_dialogue.py  CHANGED  (+2 -2)
@@ -36,7 +36,7 @@ from transformers import (
     AutoTokenizer,
     AutoProcessor,
     AutoConfig,
-
+    AutoModelForCausalLM,
 )


@@ -73,7 +73,7 @@ def load_processor_tokenizer_model(model_name):
     # Decrease 2 for Pytorch overhead and 2 for the forward to be safe
     max_memory_map[key] = f"{max_memory_map[key] - 4} GiB"

-    model =
+    model = AutoModelForCausalLM.from_pretrained(
         model_name,
         use_auth_token=os.getenv("HF_AUTH_TOKEN", True),
         device_map="auto",