Update llm_client.py
Browse files- llm_client.py +13 -6
llm_client.py
CHANGED
|
@@ -1,10 +1,17 @@
|
|
| 1 |
# llm_client.py
|
|
|
|
|
|
|
| 2 |
|
| 3 |
-
|
| 4 |
|
| 5 |
def chat_teaching_question(question: str) -> str:
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
# llm_client.py
|
| 2 |
+
from llm_prompt import SYSTEM_PROMPT, build_bayes_explanation_prompt
|
| 3 |
+
from openai import OpenAI
|
| 4 |
|
| 5 |
+
# Module-level OpenAI client shared by every call in this module, so the
# underlying HTTP connection pool is reused across requests.
# NOTE(review): OpenAI() is constructed with no arguments — presumably it picks
# up credentials (OPENAI_API_KEY) from the environment at import time; confirm
# the deployment sets that variable, since import will fail-fast otherwise.
client = OpenAI()
def chat_teaching_question(
    question: str,
    *,
    model: str = "gpt-4o-mini",
    temperature: float = 0.3,
) -> str:
    """Ask the chat model a Bayes-teaching question and return the reply text.

    Sends a two-message conversation (the module's SYSTEM_PROMPT plus the
    user question wrapped by build_bayes_explanation_prompt) to the OpenAI
    chat-completions API via the shared module-level client.

    Args:
        question: The learner's question; injected into the explanation prompt.
        model: Chat model name. Keyword-only; defaults to the previously
            hard-coded "gpt-4o-mini" so existing callers are unaffected.
        temperature: Sampling temperature. Keyword-only; defaults to the
            previously hard-coded 0.3 (low, for focused answers).

    Returns:
        The assistant's reply as a string. Returns "" when the API yields an
        empty completion, so the declared ``-> str`` contract always holds.

    Raises:
        openai.OpenAIError: Propagated unchanged from the SDK on API failure.
    """
    response = client.chat.completions.create(
        model=model,
        messages=[
            {"role": "system", "content": SYSTEM_PROMPT},
            {"role": "user", "content": build_bayes_explanation_prompt(question)},
        ],
        temperature=temperature,
    )
    # The SDK types message.content as Optional[str]; coalesce None to "" so
    # callers relying on the str annotation never receive None.
    return response.choices[0].message.content or ""