Nitish committed on
Commit
e30d231
·
1 Parent(s): 3210c1c

fix(inference): validate base_url, add trailing slash, clear OPENAI_* env conflicts, upgrade openai version

Browse files
Files changed (2) hide show
  1. inference.py +18 -2
  2. requirements.txt +1 -1
inference.py CHANGED
@@ -172,13 +172,29 @@ def run_task(task_id: str, task_num: int, client: OpenAI) -> dict:
172
 
173
  def main():
174
  # Read proxy config directly from environment — NO fallbacks, NO .env loading
175
- api_base = os.environ["API_BASE_URL"]
176
- api_key = os.environ["API_KEY"]
 
 
 
 
 
 
 
 
 
 
177
 
178
  print(f"[INFO] API_BASE_URL = {api_base}", flush=True)
179
  print(f"[INFO] MODEL_NAME = {MODEL_NAME}", flush=True)
180
  print(f"[INFO] ENV_URL = {ENV_URL}", flush=True)
181
 
 
 
 
 
 
 
182
  # Initialize OpenAI client pointing at grader's LiteLLM proxy
183
  client = OpenAI(base_url=api_base, api_key=api_key)
184
  print(f"[INFO] OpenAI client initialized successfully", flush=True)
 
172
 
173
  def main():
174
  # Read proxy config directly from environment — NO fallbacks, NO .env loading
175
+ api_base = os.environ.get("API_BASE_URL", "").strip()
176
+ api_key = os.environ.get("API_KEY", "").strip()
177
+
178
+ # Validate — crash with a clear message if the grader didn't inject values
179
+ if not api_base:
180
+ raise RuntimeError("API_BASE_URL is empty or not set")
181
+ if not api_key:
182
+ raise RuntimeError("API_KEY is empty or not set")
183
+
184
+ # Ensure base_url ends with / (httpx requires it)
185
+ if not api_base.endswith("/"):
186
+ api_base += "/"
187
 
188
  print(f"[INFO] API_BASE_URL = {api_base}", flush=True)
189
  print(f"[INFO] MODEL_NAME = {MODEL_NAME}", flush=True)
190
  print(f"[INFO] ENV_URL = {ENV_URL}", flush=True)
191
 
192
+ # Clear any conflicting OPENAI_* env vars so the openai library
193
+ # doesn't silently override our base_url / api_key
194
+ os.environ.pop("OPENAI_BASE_URL", None)
195
+ os.environ.pop("OPENAI_API_KEY", None)
196
+ os.environ.pop("OPENAI_API_BASE", None)
197
+
198
  # Initialize OpenAI client pointing at grader's LiteLLM proxy
199
  client = OpenAI(base_url=api_base, api_key=api_key)
200
  print(f"[INFO] OpenAI client initialized successfully", flush=True)
requirements.txt CHANGED
@@ -4,5 +4,5 @@ httptools
4
  uvloop
5
  pydantic==2.7.4
6
  requests==2.32.3
7
- openai==1.40.0
8
  python-dotenv==1.0.1
 
4
  uvloop
5
  pydantic==2.7.4
6
  requests==2.32.3
7
+ openai>=1.40.0
8
  python-dotenv==1.0.1