Chris4K committed on
Commit
8bca5d0
·
verified ·
1 Parent(s): 9f91f2f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -4
app.py CHANGED
@@ -54,15 +54,14 @@ class EventScraper:
54
  # Try local model first
55
  if TRANSFORMERS_AVAILABLE:
56
  try:
57
- model_name = "meta-llama/Llama-3.2-1B-Instruct"
58
  self.tokenizer = AutoTokenizer.from_pretrained(model_name)
59
  self.model = AutoModelForCausalLM.from_pretrained(
60
  model_name,
61
  torch_dtype=torch.float16,
62
  return_dict_in_generate=False,
63
  device_map='auto',
64
- max_new_tokens=12000,
65
- return_full_text=False,
66
  )
67
  return
68
  except Exception as local_err:
@@ -105,7 +104,7 @@ class EventScraper:
105
  # Use Inference Client
106
  return self.client.text_generation(
107
  prompt,
108
- max_new_tokens=12000,
109
  temperature=0.9
110
  )
111
 
 
54
  # Try local model first
55
  if TRANSFORMERS_AVAILABLE:
56
  try:
57
+ model_name = "meta-llama/Llama-3.2-3B-Instruct"
58
  self.tokenizer = AutoTokenizer.from_pretrained(model_name)
59
  self.model = AutoModelForCausalLM.from_pretrained(
60
  model_name,
61
  torch_dtype=torch.float16,
62
  return_dict_in_generate=False,
63
  device_map='auto',
64
+ return_full_text=False
 
65
  )
66
  return
67
  except Exception as local_err:
 
104
  # Use Inference Client
105
  return self.client.text_generation(
106
  prompt,
107
+ max_new_tokens=3000,
108
  temperature=0.9
109
  )
110