from transformers.utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_torch_available,
)

# Import the concrete model/config classes only when torch is installed, so
# importing this package does not fail in a torch-free environment.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    from .configuration_recastmlp_llama import RECASTMLP_llama
    from .modeling_recastmlp_llama import (
        RECASTMLP_llamaModel,
        RECASTMLP_LlamaForCausalLM,
    )

    from transformers import AutoConfig, AutoModel, AutoModelForCausalLM

    # Register the custom config and model classes with the Auto* factories
    # under the model_type "recastmlp_llama". Keeping the registration inside
    # this branch avoids a NameError when torch is unavailable.
    AutoConfig.register("recastmlp_llama", RECASTMLP_llama)
    AutoModel.register(RECASTMLP_llama, RECASTMLP_llamaModel)
    AutoModelForCausalLM.register(RECASTMLP_llama, RECASTMLP_LlamaForCausalLM)

# Module-to-symbols map following transformers' lazy-import convention.
# The imports above are eager, so this mapping is informational only and is
# not passed to _LazyModule here.
_import_structure = {
    "configuration_recastmlp_llama": ["RECASTMLP_llama"],
    "modeling_recastmlp_llama": ["RECASTMLP_llamaModel", "RECASTMLP_LlamaForCausalLM"],
}

__all__ = ["RECASTMLP_llamaModel", "RECASTMLP_LlamaForCausalLM", "RECASTMLP_llama"]
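
# Usage sketch (not part of the package): once this package has been imported,
# the Auto* registration above lets the model be built by model_type. The
# package name `recastmlp_llama` and the checkpoint path below are assumed
# placeholders, not names confirmed by this repo.
#
#     from transformers import AutoConfig, AutoModelForCausalLM
#     import recastmlp_llama  # hypothetical package name; triggers registration
#
#     config = AutoConfig.for_model("recastmlp_llama")   # fresh config for the registered type
#     model = AutoModelForCausalLM.from_config(config)   # randomly initialised model
#     # or, assuming a saved checkpoint exists locally:
#     # model = AutoModelForCausalLM.from_pretrained("./recastmlp-llama-checkpoint")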