michaelfeil committed on
Commit
792a641
1 Parent(s): 7604d30

Update configuration_nvembed.py


File "/app/.cache/huggingface/modules/transformers_modules/nvidia/NV-Embed-v2/7604d305b621f14095a1aa23d351674c2859553a/modeling_nvembed.py", line 323, in __init__
self.latent_attention_model = AutoModel.from_config(config.latent_attention_config)
File "/app/.venv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 440, in from_config
return model_class._from_config(config, **kwargs)
File "/app/.venv/lib/python3.10/site-packages/transformers/modeling_utils.py", line 1494, in _from_config
if config._attn_implementation_internal is not None:
File "/app/.venv/lib/python3.10/site-packages/transformers/configuration_utils.py", line 202, in __getattribute__
return super().__getattribute__(key)
AttributeError: 'LatentAttentionConfig' object has no attribute '_attn_implementation_internal'
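
The root cause is that LatentAttentionConfig set its own fields but never called the base PretrainedConfig constructor, which is what populates transformers-internal state such as _attn_implementation_internal. A minimal sketch of the corrected pattern (not the NV-Embed source; model_type and the default values below are illustrative, the real ones live in configuration_nvembed.py):

from transformers import PretrainedConfig

class LatentAttentionConfig(PretrainedConfig):
    model_type = "latent_attention"  # illustrative value

    def __init__(self, latent_dim=4096, cross_dim_head=4096, **kwargs):
        self.latent_dim = latent_dim
        self.cross_dim_head = cross_dim_head
        # PretrainedConfig.__init__ sets the internal attributes
        # (including _attn_implementation_internal) that
        # _from_config reads; skipping this call produces the
        # AttributeError shown in the traceback above.
        super().__init__(**kwargs)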

Files changed (1)
  1. configuration_nvembed.py +2 -0
configuration_nvembed.py CHANGED
@@ -76,6 +76,8 @@ class LatentAttentionConfig(PretrainedConfig):
         self.latent_dim = latent_dim
         self.cross_dim_head = cross_dim_head
 
+        super().__init__(**kwargs)
+
 
 class BidirectionalMistralConfig(MistralConfig):
     model_type = BIDIR_MISTRAL_TYPE
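
With the base constructor called, the failing path from the traceback resolves. A rough usage sketch, assuming the usual trust_remote_code loading pattern for this repository:

from transformers import AutoModel

# Loading the model runs modeling_nvembed.py, which internally calls
# AutoModel.from_config(config.latent_attention_config); that call now
# finds _attn_implementation_internal on the config and no longer raises.
model = AutoModel.from_pretrained("nvidia/NV-Embed-v2", trust_remote_code=True)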