frankai98 committed on
Commit
5bfd693
·
verified ·
1 Parent(s): f1b5d59

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -0
app.py CHANGED
@@ -274,10 +274,12 @@ def main():
274
 
275
  def process_with_gemma(prompt):
276
  try:
 
277
  pipe = pipeline(
278
  "text-generation",
279
  model="unsloth/gemma-3-1b-it",
280
  device="cuda" if torch.cuda.is_available() else -1,
 
281
  torch_dtype=torch.bfloat16,
282
  )
283
  result = pipe(prompt, max_new_tokens=256, return_full_text=False)
 
274
 
275
  def process_with_gemma(prompt):
276
  try:
277
+ tokenizer = AutoTokenizer.from_pretrained("unsloth/gemma-3-1b-it")
278
  pipe = pipeline(
279
  "text-generation",
280
  model="unsloth/gemma-3-1b-it",
281
  device="cuda" if torch.cuda.is_available() else -1,
282
+ tokenizer=tokenizer,
283
  torch_dtype=torch.bfloat16,
284
  )
285
  result = pipe(prompt, max_new_tokens=256, return_full_text=False)