# Load the tokenizer and model directly from the Hugging Face Hub.
# NOTE(review): both calls download weights over the network on first use
# and cache them locally (~/.cache/huggingface by default).
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("blueapple8259/test_model1")
model = AutoModelForCausalLM.from_pretrained("blueapple8259/test_model1")
maywell/ko_wikidata_QA 데이터셋의 output만 사용해서 학습하였으며 영어는 지원 안 됩니다.
(Trained only on the `output` field of the maywell/ko_wikidata_QA dataset; English is not supported.)
- Downloads last month: 14
# Use a pipeline as a high-level helper for text generation with this model.
# NOTE(review): downloads the model on first call; pass the result of
# pipe("prompt") to generate text.
from transformers import pipeline

pipe = pipeline("text-generation", model="blueapple8259/test_model1")