Fix modeling_jamba.py
modeling_jamba.py  (+1, -1)

@@ -51,7 +51,7 @@ from transformers.utils import (
     replace_return_docstrings,
 )
 from transformers.utils.import_utils import is_torch_fx_available
-from configuration_jamba import JambaConfig
+from .configuration_jamba import JambaConfig
 
 
 # try except block so it'll work with trust_remote_code. Later we can have `if is_flash_attn_2_available():`
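Why this one-character change matters: when the model is loaded with trust_remote_code=True, transformers pulls modeling_jamba.py and its sibling configuration_jamba.py from the repo and imports them as part of a dynamically created package, so the config module has to be imported relatively (.configuration_jamba); the bare absolute import does not resolve in that context. A minimal usage sketch of loading the repo's custom code this way (the repo id below is assumed for illustration and may not match the repo this commit belongs to):

# Minimal sketch, assuming the repo id "ai21labs/Jamba-v0.1"; adjust to
# whichever repo this modeling_jamba.py actually lives in.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "ai21labs/Jamba-v0.1"

# trust_remote_code=True downloads configuration_jamba.py and modeling_jamba.py
# from the Hub and imports them inside a dynamic package, which is why the
# relative import fixed above is needed for the config class to be found.
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True)

inputs = tokenizer("Hello from Jamba:", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=16)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))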