|
|
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer
from transformers.models.auto.auto_factory import MODEL_FOR_CAUSAL_LM_MAPPING
from transformers.models.auto.configuration_auto import CONFIG_MAPPING

from thai_llm.configuration_thai_llm import ThaiLLMConfig
from thai_llm.modeling_thai_llm import ThaiLLMForCausalLM
|
|
|
|
|
|
|
|
# Register the custom "thai_llm" architecture with the Auto* factories so that
# AutoConfig / AutoModelForCausalLM (and from_pretrained on configs carrying
# model_type == "thai_llm") can resolve it.
#
# Use the public registration API rather than poking the private
# CONFIG_MAPPING / MODEL_FOR_CAUSAL_LM_MAPPING internals: AutoConfig.register
# and AutoModelForCausalLM.register perform the same two mapping insertions,
# but additionally validate the pairing (e.g. that ThaiLLMConfig.model_type
# matches the registered key) and are part of transformers' stable interface.
AutoConfig.register("thai_llm", ThaiLLMConfig)
AutoModelForCausalLM.register(ThaiLLMConfig, ThaiLLMForCausalLM)
|
|
|
|
|
# Load the tokenizer and weights from the local checkpoint directory.
model_path = "./model"
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = ThaiLLMForCausalLM.from_pretrained(model_path)

# Tokenize a short Thai prompt into PyTorch tensors, continue it for up to
# 20 new tokens, and print the decoded completion (special tokens stripped).
prompt_batch = tokenizer("ประเทศไทย", return_tensors="pt")
generated_ids = model.generate(**prompt_batch, max_new_tokens=20)
completion = tokenizer.decode(generated_ids[0], skip_special_tokens=True)
print(completion)