llm-o1 / run_model.py
JonusNattapong's picture
Create run_model.py
09b76eb verified
"""Load the custom ThaiLLM causal LM and run a short text-generation demo."""
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

from thai_llm.configuration_thai_llm import ThaiLLMConfig
from thai_llm.modeling_thai_llm import ThaiLLMForCausalLM

# Register the custom architecture through the public Auto* API so that
# model_type "thai_llm" resolves for AutoConfig / AutoModelForCausalLM.
# (The internal CONFIG_MAPPING / MODEL_FOR_CAUSAL_LM_MAPPING registries are
# private and may change between transformers releases.)
AutoConfig.register("thai_llm", ThaiLLMConfig)
AutoModelForCausalLM.register(ThaiLLMConfig, ThaiLLMForCausalLM)


def main() -> None:
    """Load tokenizer/model and print one generated continuation."""
    model_path = "./model"  # local directory or a Hugging Face model ID
    tokenizer = AutoTokenizer.from_pretrained(model_path)
    model = ThaiLLMForCausalLM.from_pretrained(model_path)
    model.eval()  # inference only: disable dropout etc.

    # Text generation — prompt is Thai for "Thailand".
    inputs = tokenizer("ประเทศไทย", return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=20)
    print(tokenizer.decode(outputs[0], skip_special_tokens=True))


if __name__ == "__main__":
    main()