# Load the model directly (no pipeline wrapper).
from transformers import AutoTokenizer, AutoModelForCausalLM

# Fetch the tokenizer and causal-LM weights for nakcnx/OTG-Math-680 from the
# Hugging Face Hub; downloaded on first call, then served from the local cache.
# NOTE: the original line had the prose "Quick Links" fused onto the end of the
# call, which made this a SyntaxError — removed.
tokenizer = AutoTokenizer.from_pretrained("nakcnx/OTG-Math-680")
model = AutoModelForCausalLM.from_pretrained("nakcnx/OTG-Math-680")
OTG-Math-680
This model is a fine-tuned version of Open Thai GPT, trained on a Thai Math QA dataset of 680 pairs (derived from GSM8K, GPT-3.5 generated, with chain-of-thought reasoning).
- Downloads last month: 13
# Use a pipeline as a high-level helper.
# NOTE: the original snippet was collapsed onto one line starting with "#",
# turning the import and the pipeline() call into dead comment text — restored
# to three runnable statements.
from transformers import pipeline

# Build a text-generation pipeline around the OTG-Math-680 checkpoint; the
# model and tokenizer are resolved from the Hub automatically.
pipe = pipeline("text-generation", model="nakcnx/OTG-Math-680")