Fix undefined LlamaForCausalLM and delete the try/except around its import
src/axolotl/utils/models.py
```diff
@@ -81,7 +81,6 @@ def load_model(
     Load a model from a base model and a model type.
     """
 
-    global LlamaForCausalLM  # pylint: disable=global-statement
     # TODO refactor as a kwarg
     load_in_8bit = cfg.load_in_8bit
     cfg.is_llama_derived_model = "llama" in base_model or (
@@ -203,12 +202,7 @@ def load_model(
         )
         load_in_8bit = False
     elif cfg.is_llama_derived_model:
-        try:
-            from transformers import LlamaForCausalLM
-        except ImportError:
-            logging.warning(
-                "This version of transformers does not support Llama. Consider upgrading."
-            )
+        from transformers import LlamaForCausalLM
 
         config = LlamaConfig.from_pretrained(base_model_config)
         model = LlamaForCausalLM.from_pretrained(
```
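For context, here is a minimal sketch of why the removed guard was buggy (this is not the axolotl code; `no_such_module` is a hypothetical module name used only to force the import failure): when the guarded import fails, the `except ImportError` branch only logs a warning, so `LlamaForCausalLM` is never bound, and the later use raises a confusing `NameError` instead of a clear `ImportError`. The direct import introduced by this commit fails fast at the import line instead.

```python
import logging

def load_llama_old_style():
    try:
        # Stand-in for `from transformers import LlamaForCausalLM` on an old
        # transformers release; `no_such_module` is a hypothetical name used
        # only to trigger the ImportError path.
        from no_such_module import LlamaForCausalLM
    except ImportError:
        logging.warning(
            "This version of transformers does not support Llama. Consider upgrading."
        )
    # The except branch only logged, so the name was never bound:
    return LlamaForCausalLM  # raises UnboundLocalError (a subclass of NameError)

try:
    load_llama_old_style()
except NameError as err:
    print(f"caught: {err}")  # the confusing late failure the commit removes
```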