Spaces:
Sleeping
Sleeping
Update app/openai_client.py
Browse files- app/openai_client.py +8 -29
app/openai_client.py
CHANGED
|
@@ -1,7 +1,5 @@
|
|
| 1 |
from __future__ import annotations
|
| 2 |
-
import os
|
| 3 |
-
import json
|
| 4 |
-
import httpx
|
| 5 |
|
| 6 |
OPENAI_BASE = os.environ.get("OPENAI_BASE", "https://api.openai.com/v1")
|
| 7 |
OPENAI_MODEL = os.environ.get("OPENAI_MODEL", "gpt-4o-mini")
|
|
@@ -12,31 +10,20 @@ HEADERS = {
|
|
| 12 |
"Content-Type": "application/json",
|
| 13 |
}
|
| 14 |
|
| 15 |
-
|
| 16 |
async def openai_chat(messages, temperature: float = 0.7, max_tokens: int = 300) -> str:
|
| 17 |
-
"""自由回答(プレーンテキスト)"""
|
| 18 |
if not OPENAI_API_KEY:
|
| 19 |
-
raise RuntimeError("OPENAI_API_KEY is not set
|
| 20 |
-
|
| 21 |
url = f"{OPENAI_BASE}/chat/completions"
|
| 22 |
-
payload = {
|
| 23 |
-
"model": OPENAI_MODEL,
|
| 24 |
-
"messages": messages,
|
| 25 |
-
"temperature": float(temperature),
|
| 26 |
-
"max_tokens": int(max_tokens),
|
| 27 |
-
}
|
| 28 |
async with httpx.AsyncClient(timeout=60) as client:
|
| 29 |
r = await client.post(url, headers=HEADERS, json=payload)
|
| 30 |
r.raise_for_status()
|
| 31 |
data = r.json()
|
| 32 |
return data["choices"][0]["message"]["content"].strip()
|
| 33 |
|
| 34 |
-
|
| 35 |
async def openai_chat_json(messages, temperature: float = 0.2, max_tokens: int = 1200) -> dict:
|
| 36 |
-
"""JSONモード(出力は厳密なJSON文字列)"""
|
| 37 |
if not OPENAI_API_KEY:
|
| 38 |
-
raise RuntimeError("OPENAI_API_KEY is not set
|
| 39 |
-
|
| 40 |
url = f"{OPENAI_BASE}/chat/completions"
|
| 41 |
payload = {
|
| 42 |
"model": OPENAI_MODEL,
|
|
@@ -49,22 +36,15 @@ async def openai_chat_json(messages, temperature: float = 0.2, max_tokens: int =
|
|
| 49 |
r = await client.post(url, headers=HEADERS, json=payload)
|
| 50 |
r.raise_for_status()
|
| 51 |
data = r.json()
|
| 52 |
-
|
| 53 |
-
return json.loads(content)
|
| 54 |
-
|
| 55 |
|
| 56 |
def openai_judge(system: str, user: str) -> dict:
|
| 57 |
-
"""審査用(JSONモード)。同期呼び出し"""
|
| 58 |
if not OPENAI_API_KEY:
|
| 59 |
-
raise RuntimeError("OPENAI_API_KEY is not set
|
| 60 |
-
|
| 61 |
url = f"{OPENAI_BASE}/chat/completions"
|
| 62 |
payload = {
|
| 63 |
"model": OPENAI_MODEL,
|
| 64 |
-
"messages": [
|
| 65 |
-
{"role": "system", "content": system},
|
| 66 |
-
{"role": "user", "content": user},
|
| 67 |
-
],
|
| 68 |
"response_format": {"type": "json_object"},
|
| 69 |
"temperature": 0.0,
|
| 70 |
"max_tokens": 400,
|
|
@@ -73,5 +53,4 @@ def openai_judge(system: str, user: str) -> dict:
|
|
| 73 |
r = client.post(url, headers=HEADERS, json=payload)
|
| 74 |
r.raise_for_status()
|
| 75 |
data = r.json()
|
| 76 |
-
|
| 77 |
-
return json.loads(content)
|
|
|
|
| 1 |
from __future__ import annotations
|
| 2 |
+
import os, json, httpx
|
|
|
|
|
|
|
| 3 |
|
| 4 |
OPENAI_BASE = os.environ.get("OPENAI_BASE", "https://api.openai.com/v1")
|
| 5 |
OPENAI_MODEL = os.environ.get("OPENAI_MODEL", "gpt-4o-mini")
|
|
|
|
| 10 |
"Content-Type": "application/json",
|
| 11 |
}
|
| 12 |
|
|
|
|
| 13 |
async def openai_chat(messages, temperature: float = 0.7, max_tokens: int = 300) -> str:
    """Send a chat-completion request and return the assistant reply as plain text.

    Args:
        messages: List of chat messages ({"role": ..., "content": ...}) forwarded verbatim.
        temperature: Sampling temperature, coerced to float before sending.
        max_tokens: Completion-length cap, coerced to int before sending.

    Returns:
        The stripped text content of the first choice.

    Raises:
        RuntimeError: If OPENAI_API_KEY is unset.
        httpx.HTTPStatusError: On a non-2xx response from the API.
    """
    if not OPENAI_API_KEY:
        raise RuntimeError("OPENAI_API_KEY is not set.")
    request_body = {
        "model": OPENAI_MODEL,
        "messages": messages,
        "temperature": float(temperature),
        "max_tokens": int(max_tokens),
    }
    endpoint = f"{OPENAI_BASE}/chat/completions"
    async with httpx.AsyncClient(timeout=60) as http:
        response = await http.post(endpoint, headers=HEADERS, json=request_body)
        response.raise_for_status()
        body = response.json()
    return body["choices"][0]["message"]["content"].strip()
|
| 23 |
|
|
|
|
| 24 |
async def openai_chat_json(messages, temperature: float = 0.2, max_tokens: int = 1200) -> dict:
|
|
|
|
| 25 |
if not OPENAI_API_KEY:
|
| 26 |
+
raise RuntimeError("OPENAI_API_KEY is not set.")
|
|
|
|
| 27 |
url = f"{OPENAI_BASE}/chat/completions"
|
| 28 |
payload = {
|
| 29 |
"model": OPENAI_MODEL,
|
|
|
|
| 36 |
r = await client.post(url, headers=HEADERS, json=payload)
|
| 37 |
r.raise_for_status()
|
| 38 |
data = r.json()
|
| 39 |
+
return json.loads(data["choices"][0]["message"]["content"].strip())
|
|
|
|
|
|
|
| 40 |
|
| 41 |
def openai_judge(system: str, user: str) -> dict:
|
|
|
|
| 42 |
if not OPENAI_API_KEY:
|
| 43 |
+
raise RuntimeError("OPENAI_API_KEY is not set.")
|
|
|
|
| 44 |
url = f"{OPENAI_BASE}/chat/completions"
|
| 45 |
payload = {
|
| 46 |
"model": OPENAI_MODEL,
|
| 47 |
+
"messages": [{"role": "system", "content": system}, {"role": "user", "content": user}],
|
|
|
|
|
|
|
|
|
|
| 48 |
"response_format": {"type": "json_object"},
|
| 49 |
"temperature": 0.0,
|
| 50 |
"max_tokens": 400,
|
|
|
|
| 53 |
r = client.post(url, headers=HEADERS, json=payload)
|
| 54 |
r.raise_for_status()
|
| 55 |
data = r.json()
|
| 56 |
+
return json.loads(data["choices"][0]["message"]["content"].strip())
|
|
|