# cs-tutor / tutor.py
# commit 3d7bc32 (simonguest): Added age and name as query params
import openai
import os
from prompts import system_prompt, welcome_prompt, run_code_prompt, is_code_prompt
# Chat roles: used both as the single key in each Tutor.memory entry
# and as the "role" field of OpenAI chat messages.
SYSTEM = "system"
TEACHER = "assistant"
STUDENT = "user"
class Tutor:
    """Conversational CS tutor backed by the OpenAI chat-completion API.

    The whole conversation lives in ``self.memory``: a list of
    single-entry dicts mapping a role string (``"system"``,
    ``"assistant"``, ``"user"``) to the message text.  State can be
    round-tripped through ``serialize``/``deserialize`` so a session
    survives between requests.
    """

    def __init__(self, instructions="", starter_code="", age="12", name="", context=None, debug=False):
        """Create a tutor, either fresh or restored from *context*.

        instructions: lesson instructions embedded into the system prompt.
        starter_code: code scaffold shown to the student.
        age, name: student details used to personalise the prompts.
        context: a dict previously produced by ``serialize``; when given,
            the other content arguments are ignored and state is restored.
        debug: stored on the instance; not used internally here.
        """
        self.model = "gpt-4"
        self.temperature = 0.0
        self.api_key = os.getenv("OPENAI_API_KEY")
        self.debug = debug
        if context is not None:
            self.deserialize(context)
        else:
            self.memory = []
            self.instructions = instructions
            self.starter_code = starter_code
            self.age = age
            self.name = name
            self.memory.append(
                {SYSTEM: system_prompt(self.instructions, self.starter_code, self.age, self.name)}
            )
            self.memory.append({TEACHER: welcome_prompt()})

    def serialize(self):
        """Return the tutor state as a JSON-friendly dictionary."""
        return {
            "instructions": self.instructions,
            "starter_code": self.starter_code,
            "memory": self.memory,
            "age": self.age,
            "name": self.name,
        }

    def deserialize(self, data):
        """Restore state from a dict produced by ``serialize``.

        Raises ValueError when *data* is not a dict containing "memory".
        Optional keys fall back to the ``__init__`` defaults instead of
        raising KeyError, so older serialized payloads still load.
        """
        if isinstance(data, dict) and "memory" in data:
            self.memory = data["memory"]
            self.instructions = data.get("instructions", "")
            self.starter_code = data.get("starter_code", "")
            self.age = data.get("age", "12")
            self.name = data.get("name", "")
        else:
            raise ValueError("Input must be a dictionary containing 'memory'")

    def _gpt(self):
        """Start a streaming chat completion over the current memory."""
        return openai.ChatCompletion.create(
            model=self.model,
            api_key=self.api_key,
            temperature=self.temperature,
            messages=self._memory_as_openai_messages(),
            stream=True,
        )

    def _stream_reply(self, request):
        """Shared streaming tail used by ``chat`` and ``code``.

        Yields the history once immediately, then pre-appends an empty
        teacher message and, when *request* is true, streams model tokens
        into it, yielding the updated history after each token.
        """
        yield self._memory_as_history()
        # Pre-append an empty teacher message so we can stream into it.
        self.memory.append({TEACHER: ""})
        if request:
            for token in self._gpt():
                choice = token.choices[0]
                if choice.finish_reason != "stop":
                    # A streamed delta may omit "content" (e.g. the initial
                    # role-only chunk) — default to "" rather than failing.
                    self.memory[-1][TEACHER] += choice.delta.get("content", "")
                yield self._memory_as_history()

    def chat(self, message, role=STUDENT, request=True):
        """Append *message* to the conversation and stream the reply.

        Generator: yields chat-history snapshots (see _memory_as_history).
        """
        self.memory.append({role: message})
        yield from self._stream_reply(request)

    def code(self, editor, output, request=True):
        """Report a code run (source + its output) and stream feedback.

        Generator: yields chat-history snapshots (see _memory_as_history).
        """
        self.memory.append({STUDENT: run_code_prompt(editor, output)})
        yield from self._stream_reply(request)

    def _memory_as_string(self):
        """Format memory as "role: message" lines (debugging aid)."""
        return "".join(
            f"{role}: {message}\n"
            for entry in self.memory
            for role, message in entry.items()
        )

    def _memory_as_history(self):
        """Convert memory into [message, reply] pairs for a chat UI.

        The system prompt (index 0) is rendered as None, raw code-run
        prompts are replaced with a short placeholder, and a missing
        trailing reply becomes None.
        """
        history = []
        for i in range(0, len(self.memory), 2):  # step by 2: pairs
            if i == 0:
                first = None  # hide the system prompt from the UI
            else:
                first = list(self.memory[i].values())[0]
                if is_code_prompt(first):
                    first = "Running your code..."
            if i + 1 < len(self.memory):
                second = list(self.memory[i + 1].values())[0]
                if is_code_prompt(second):
                    second = "Running your code..."
            else:
                second = None
            history.append([first, second])
        return history

    def _memory_as_openai_messages(self):
        """Convert memory into OpenAI-style ``{"role", "content"}`` dicts.

        The system prompt is regenerated from the current attributes, and
        system entries already stored in memory are skipped so the prompt
        is not sent twice (the previous version duplicated it).
        """
        messages = [
            {
                "role": SYSTEM,
                "content": system_prompt(self.instructions, self.starter_code, self.age, self.name),
            }
        ]
        for entry in self.memory:
            for role, message in entry.items():
                if role == SYSTEM:
                    continue  # fresh system prompt already added above
                messages.append({"role": role, "content": message})
        return messages