# Load the tokenizer and model directly (no pipeline wrapper).
from transformers import AutoTokenizer, AutoModelForCausalLM

# NOTE: trust_remote_code=True executes custom Python shipped with the
# checkpoint repository — only enable it for repositories you trust.
tokenizer = AutoTokenizer.from_pretrained("noahshinn/ts-code2td", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("noahshinn/ts-code2td", trust_remote_code=True)
A README.md file exists for this model, but its content is empty.
- Downloads last month
- 6
# Use a pipeline as a high-level helper.
# Warning: Pipeline type "translation" is no longer supported in transformers v5.
# You must load the model directly (see above) or downgrade to v4.x with:
# pip install "transformers<5.0.0"
from transformers import pipeline

# trust_remote_code=True executes custom code from the checkpoint repository;
# only enable it for repositories you trust.
pipe = pipeline("translation", model="noahshinn/ts-code2td", trust_remote_code=True)