ademarteau committed on
Commit
f8a94b0
·
1 Parent(s): ca85d91

fix: fall back to HF_TOKEN env var for Inference API auth

Browse files
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -1,4 +1,5 @@
1
  import json
 
2
  import re
3
 
4
  import gradio as gr
@@ -193,7 +194,7 @@ def run_llm_simulation(env_name, hf_token):
193
  agent=BaseAgent(dc), # placeholder; we override ROP manually
194
  )
195
 
196
- client = InferenceClient(token=hf_token or None)
197
  convo_history = []
198
  memory_bank = []
199
  current_rop = dc.daily_demand_distribution[HISTO_DAYS].demand_mean * LEAD_TIME
 
1
  import json
2
+ import os
3
  import re
4
 
5
  import gradio as gr
 
194
  agent=BaseAgent(dc), # placeholder; we override ROP manually
195
  )
196
 
197
+ client = InferenceClient(token=hf_token or os.environ.get("HF_TOKEN"))
198
  convo_history = []
199
  memory_bank = []
200
  current_rop = dc.daily_demand_distribution[HISTO_DAYS].demand_mean * LEAD_TIME