Chris4K committed on
Commit
2e88831
·
verified ·
1 Parent(s): 415d65e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -3
app.py CHANGED
@@ -60,7 +60,8 @@ class EventScraper:
60
  model_name,
61
  torch_dtype=torch.float16,
62
  return_dict_in_generate=False,
63
- device_map='auto'
 
64
  )
65
  return
66
  except Exception as local_err:
@@ -75,12 +76,14 @@ class EventScraper:
75
  if hf_token:
76
  self.client = InferenceClient(
77
  model="meta-llama/Llama-3.2-3B-Instruct",
78
- token=hf_token
 
79
  )
80
  else:
81
  # Public model access without token
82
  self.client = InferenceClient(
83
- model="meta-llama/Llama-3.2-3B-Instruct"
 
84
  )
85
  except Exception as e:
86
  gr.Warning(f"Inference Client setup error: {str(e)}")
 
60
  model_name,
61
  torch_dtype=torch.float16,
62
  return_dict_in_generate=False,
63
+ device_map='auto',
64
+ max_new_tokens=12000
65
  )
66
  return
67
  except Exception as local_err:
 
76
  if hf_token:
77
  self.client = InferenceClient(
78
  model="meta-llama/Llama-3.2-3B-Instruct",
79
+ token=hf_token,
80
+ max_new_tokens=12000
81
  )
82
  else:
83
  # Public model access without token
84
  self.client = InferenceClient(
85
+ model="meta-llama/Llama-3.2-3B-Instruct",
86
+ max_new_tokens=12000
87
  )
88
  except Exception as e:
89
  gr.Warning(f"Inference Client setup error: {str(e)}")