recast2-G16W128H64 / __init__.py
appledora's picture
Upload __init__.py with huggingface_hub
10a6572 verified
raw
history blame contribute delete
985 Bytes
# filename: __init__.py
from transformers.utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_torch_available,
)
try:
    # Torch is required by the modeling code; follow the transformers
    # convention of probing availability before importing it.
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    # Torch missing: skip model imports and Auto-class registration.
    # NOTE(review): the names listed in __all__ below will then not exist
    # on this module, so `from <pkg> import *` raises AttributeError —
    # confirm this is the intended degradation.
    pass
else:
    from .modeling_recast_llama import (
        RECAST7b_llamaModel,
        RECAST7b_LlamaForCausalLM,
    )
    from .configuration_recast_llama import RECAST7b_llama

    from transformers import AutoConfig, AutoModel, AutoModelForCausalLM

    # Register the custom architecture so AutoConfig/AutoModel/
    # AutoModelForCausalLM.from_pretrained(...) can resolve the
    # "recast7b_llama" model type to these local classes.
    # NOTE(review): register() raises if the key is already taken with a
    # different class — re-importing after a conflicting registration will
    # fail; consider exist_ok=True on recent transformers versions.
    AutoConfig.register("recast7b_llama", RECAST7b_llama)
    AutoModel.register(RECAST7b_llama, RECAST7b_llamaModel)
    AutoModelForCausalLM.register(RECAST7b_llama, RECAST7b_LlamaForCausalLM)

# Submodule -> public-name mapping in the transformers _LazyModule format.
# NOTE(review): _LazyModule is imported above but never instantiated, so
# this mapping is currently unused — either wire up lazy loading via
# `sys.modules[__name__] = _LazyModule(...)` or drop the import.
_import_structure = {
    "configuration_recast_llama": ["RECAST7b_llama"],
    "modeling_recast_llama": ["RECAST7b_llamaModel", "RECAST7b_LlamaForCausalLM"],
}

# Public API of the package (only actually importable when torch is present).
__all__ = ["RECAST7b_llamaModel", "RECAST7b_LlamaForCausalLM", "RECAST7b_llama"]