File size: 4,797 Bytes
fbd6b13
969d7ed
fbd6b13
969d7ed
fbd6b13
 
 
969d7ed
 
fbd6b13
3d7bc32
fbd6b13
 
 
 
969d7ed
fbd6b13
 
 
 
 
 
3d7bc32
 
fbd6b13
3d7bc32
fbd6b13
 
969d7ed
fbd6b13
 
 
 
 
 
3d7bc32
 
fbd6b13
969d7ed
fbd6b13
 
 
 
 
 
3d7bc32
 
fbd6b13
 
969d7ed
fbd6b13
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3d7bc32
fbd6b13
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
import openai
import os
from prompts import system_prompt, welcome_prompt, run_code_prompt, is_code_prompt

# OpenAI chat roles used as keys in the memory list.
SYSTEM = "system"
TEACHER = "assistant"
STUDENT = "user"


class Tutor:
    """Streaming GPT-4 coding tutor with a persistent conversation memory.

    Memory is a list of single-key dicts mapping a role (SYSTEM / TEACHER /
    STUDENT) to the message text.  The whole session can be round-tripped
    through ``serialize()`` / ``deserialize()`` so it survives between
    requests.  ``chat()`` and ``code()`` are generators that yield the
    updated history after every streamed token, so a UI can live-refresh.
    """

    def __init__(self, instructions="", starter_code="", age="12", name="", context=None, debug=False):
        self.model = "gpt-4"
        self.temperature = 0.0
        self.api_key = os.getenv("OPENAI_API_KEY")
        self.debug = debug

        if context is not None:
            # Resume a previously serialized session instead of starting fresh.
            self.deserialize(context)
        else:
            self.memory = []
            self.instructions = instructions
            self.starter_code = starter_code
            self.age = age
            self.name = name
            # Seed the conversation: system prompt first, then the tutor's
            # welcome message.
            self.memory.append(
                {SYSTEM: system_prompt(self.instructions, self.starter_code, self.age, self.name)}
            )
            self.memory.append({TEACHER: welcome_prompt()})

    def serialize(self):
        """Return the session state as a plain (JSON-friendly) dict."""
        return {
            "instructions": self.instructions,
            "starter_code": self.starter_code,
            "memory": self.memory,
            "age": self.age,
            "name": self.name,
        }

    def deserialize(self, data):
        """Restore session state produced by :meth:`serialize`.

        Missing secondary keys fall back to the same defaults ``__init__``
        uses, so older payloads that only carry ``"memory"`` still load.

        Raises:
            ValueError: if ``data`` is not a dict containing ``"memory"``.
        """
        if isinstance(data, dict) and "memory" in data:
            self.memory = data["memory"]
            self.instructions = data.get("instructions", "")
            self.starter_code = data.get("starter_code", "")
            self.age = data.get("age", "12")
            self.name = data.get("name", "")
        else:
            raise ValueError("Input must be a dictionary containing 'memory'")

    def _gpt(self):
        # Open a streaming chat completion; the caller iterates the chunks.
        return openai.ChatCompletion.create(
            model=self.model,
            api_key=self.api_key,
            temperature=self.temperature,
            messages=self._memory_as_openai_messages(),
            stream=True,
        )

    def _stream_reply(self, request):
        """Append an empty teacher message and stream the model reply into it.

        Shared by :meth:`chat` and :meth:`code`.  Yields the updated history
        after each received token.  When ``request`` is false, only the empty
        placeholder message is appended (no API call).
        """
        self.memory.append({TEACHER: ""})
        if not request:
            return
        for token in self._gpt():
            if token.choices[0].finish_reason != "stop":
                # The first streamed chunk may carry only the role, with no
                # content attribute (or content=None); treat that as "".
                piece = getattr(token.choices[0].delta, "content", "") or ""
                self.memory[-1][TEACHER] += piece
                yield self._memory_as_history()

    def chat(self, message, role=STUDENT, request=True):
        """Add ``message`` to memory and stream the tutor's reply.

        Yields the history (list of ``[student, teacher]`` pairs) after the
        message is added and again after every streamed token.
        """
        self.memory.append({role: message})
        yield self._memory_as_history()
        yield from self._stream_reply(request)

    def code(self, editor, output, request=True):
        """Send the student's code and its output, then stream tutor feedback.

        ``editor`` is the code in the editor, ``output`` the program output;
        both are wrapped by ``run_code_prompt``.  Yields history updates like
        :meth:`chat`.
        """
        self.memory.append({STUDENT: run_code_prompt(editor, output)})
        yield self._memory_as_history()
        yield from self._stream_reply(request)

    def _memory_as_string(self):
        """Render memory as "role: message" lines, one entry per line."""
        memory_string = ""
        for entry in self.memory:
            for role, message in entry.items():
                memory_string += f"{role}: {message}\n"
        return memory_string

    def _memory_as_history(self):
        """Convert memory into a list of ``[message1, message2]`` pairs.

        The system prompt (index 0) is replaced by ``None``, and any code-run
        prompt is masked as "Running your code..." so raw prompt text is not
        shown in the UI.
        """
        history = []
        for i in range(0, len(self.memory), 2):  # Step by 2, as we need pairs
            if i == 0:
                message1 = None  # Skip the system prompt
            else:
                message1 = list(self.memory[i].values())[0]
            if message1 is not None and is_code_prompt(message1):
                message1 = "Running your code..."
            # If there's a next message, get it, else use None.
            message2 = (
                list(self.memory[i + 1].values())[0]
                if i + 1 < len(self.memory)
                else None
            )
            if message2 is not None and is_code_prompt(message2):
                message2 = "Running your code..."
            history.append([message1, message2])
        return history

    def _memory_as_openai_messages(self):
        """Convert memory to OpenAI-style ``{"role", "content"}`` messages.

        A fresh system prompt is built from the current session fields, so
        stored SYSTEM entries in memory are skipped — previously they were
        sent in addition to the fresh one, duplicating the system message.
        """
        messages = [
            {
                "role": SYSTEM,
                "content": system_prompt(self.instructions, self.starter_code, self.age, self.name),
            }
        ]
        for entry in self.memory:
            for role, message in entry.items():
                if role == SYSTEM:
                    # Already supplied a fresh system prompt above.
                    continue
                messages.append({"role": role, "content": message})
        return messages