default->eager
Browse files- deepthought_inference.py +1 -1
deepthought_inference.py
CHANGED
@@ -38,7 +38,7 @@ class DeepthoughtModel:
|
|
38 |
self.model_name,
|
39 |
torch_dtype=torch.bfloat16,
|
40 |
device_map="auto",
|
41 |
-
attn_implementation=("flash_attention_2" if flash_attn_exists else "default"),
|
42 |
use_cache=True,
|
43 |
trust_remote_code=True,
|
44 |
)
|
|
|
38 |
self.model_name,
|
39 |
torch_dtype=torch.bfloat16,
|
40 |
device_map="auto",
|
41 |
+
attn_implementation=("flash_attention_2" if flash_attn_exists else "eager"),
|
42 |
use_cache=True,
|
43 |
trust_remote_code=True,
|
44 |
)
|