Navya-Sree committed on
Commit
cbe8f8a
·
verified ·
1 Parent(s): 1794932

Update src/macg/llm_openai.py

Browse files
Files changed (1) hide show
  1. src/macg/llm_openai.py +19 -19
src/macg/llm_openai.py CHANGED
@@ -10,7 +10,7 @@ class OpenAIResponsesLLM(LLMClient):
10
  api_key: str,
11
  model: str = "gpt-5",
12
  base_url: str = "https://api.openai.com/v1",
13
- temperature: float = 0.2,
14
  max_output_tokens: int = 900,
15
  ) -> None:
16
  if not api_key:
@@ -21,25 +21,25 @@ class OpenAIResponsesLLM(LLMClient):
21
  self.client = OpenAI(api_key=api_key, base_url=base_url)
22
 
23
  def complete(self, system: str, prompt: str) -> str:
24
- params = dict(
25
- model=self.model,
26
- instructions=system,
27
- input=prompt,
28
- max_output_tokens=self.max_output_tokens,
29
- )
30
 
31
- # try sending temperature (some models accept it)
32
- if self.temperature is not None:
33
- params["temperature"] = self.temperature
34
 
35
- try:
36
- resp = self.client.responses.create(**params)
37
- return resp.output_text
38
- except Exception as e:
39
- msg = str(e)
40
- # If the model doesn't support temperature, retry without it
41
- if "Unsupported parameter" in msg and "temperature" in msg:
42
- params.pop("temperature", None)
43
  resp = self.client.responses.create(**params)
44
  return resp.output_text
45
- raise
 
 
 
 
 
 
 
 
10
  api_key: str,
11
  model: str = "gpt-5",
12
  base_url: str = "https://api.openai.com/v1",
13
+ temperature: float | None = 0.2,
14
  max_output_tokens: int = 900,
15
  ) -> None:
16
  if not api_key:
 
21
  self.client = OpenAI(api_key=api_key, base_url=base_url)
22
 
23
  def complete(self, system: str, prompt: str) -> str:
24
+ params = {
25
+ "model": self.model,
26
+ "instructions": system,
27
+ "input": prompt,
28
+ "max_output_tokens": self.max_output_tokens,
29
+ }
30
 
31
+ # Some models reject temperature; include it only if set
32
+ if self.temperature is not None:
33
+ params["temperature"] = self.temperature
34
 
35
+ try:
 
 
 
 
 
 
 
36
  resp = self.client.responses.create(**params)
37
  return resp.output_text
38
+ except Exception as e:
39
+ msg = str(e)
40
+ # If the model doesn't support temperature, retry without it
41
+ if "Unsupported parameter" in msg and "temperature" in msg:
42
+ params.pop("temperature", None)
43
+ resp = self.client.responses.create(**params)
44
+ return resp.output_text
45
+ raise