# Hugging Face Space demo script (file size: 391 bytes).
"""Minimal CPU demo: complete a Python snippet with DeepSeek-Coder 1.3B Instruct."""
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_NAME = "deepseek-ai/deepseek-coder-1.3b-instruct"


def main() -> None:
    """Load the model on CPU, generate a continuation of a prompt, and print it."""
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    # device_map="cpu" keeps the whole model on CPU (no accelerator required).
    model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, device_map="cpu")

    inputs = tokenizer("def add(x, y):", return_tensors="pt")
    # max_new_tokens bounds only the generated continuation; the original
    # max_length=30 also counted the prompt tokens, so the effective amount
    # of generated text shrank with prompt length (and max_length is the
    # deprecated way to express this intent).
    outputs = model.generate(**inputs, max_new_tokens=30)
    # skip_special_tokens avoids printing BOS/EOS markers in the result.
    print(tokenizer.decode(outputs[0], skip_special_tokens=True))


if __name__ == "__main__":
    main()