Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
|
@@ -1669,14 +1669,14 @@ def openai_call(prompt: str, messages: list = None, model: str = "o3-mini",
|
|
| 1669 |
client = openai.OpenAI(api_key=os.getenv('OPENAI_API_KEY'))
|
| 1670 |
params = {
|
| 1671 |
"model": model,
|
| 1672 |
-
"messages": messages
|
| 1673 |
-
"temperature": temperature
|
| 1674 |
}
|
| 1675 |
# Note: Adjust token names based on the model
|
| 1676 |
if model == "o3-mini":
|
| 1677 |
params["max_completion_tokens"] = max_tokens_param
|
| 1678 |
else:
|
| 1679 |
params["max_tokens"] = max_tokens_param
|
|
|
|
| 1680 |
response = client.chat.completions.create(**params)
|
| 1681 |
result = response.choices[0].message.content
|
| 1682 |
result = result.strip().strip("json").strip("```").strip()
|
|
|
|
| 1669 |
client = openai.OpenAI(api_key=os.getenv('OPENAI_API_KEY'))
|
| 1670 |
params = {
|
| 1671 |
"model": model,
|
| 1672 |
+
"messages": messages
|
|
|
|
| 1673 |
}
|
| 1674 |
# Note: Adjust token names based on the model
|
| 1675 |
if model == "o3-mini":
|
| 1676 |
params["max_completion_tokens"] = max_tokens_param
|
| 1677 |
else:
|
| 1678 |
params["max_tokens"] = max_tokens_param
|
| 1679 |
+
params["temperature"] = temperature
|
| 1680 |
response = client.chat.completions.create(**params)
|
| 1681 |
result = response.choices[0].message.content
|
| 1682 |
result = result.strip().strip("json").strip("```").strip()
|