dongsiqie committed on
Commit
e1e08e4
·
1 Parent(s): 072547f

Upload 6 files

Browse files
Files changed (6) hide show
  1. bot_backend.py +231 -0
  2. config.json.example +16 -0
  3. functional.py +116 -0
  4. jupyter_backend.py +100 -0
  5. requirements.txt +10 -0
  6. response_parser.py +200 -0
bot_backend.py ADDED
@@ -0,0 +1,231 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import openai
3
+ import os
4
+ import copy
5
+ import shutil
6
+ from jupyter_backend import *
7
+ from typing import *
8
+
9
# OpenAI function-calling schema: a single `execute_code` tool whose only
# argument is the Python source text to run in the persistent Jupyter kernel.
functions = [
    {
        "name": "execute_code",
        "description": "This function allows you to execute Python code and retrieve the terminal output. If the code "
                       "generates image output, the function will return the text '[image]'. The code is sent to a "
                       "Jupyter kernel for execution. The kernel will remain active after execution, retaining all "
                       "variables in memory.",
        "parameters": {
            "type": "object",
            "properties": {
                "code": {
                    "type": "string",
                    "description": "The code text"
                }
            },
            "required": ["code"],
        }
    }
]

# System prompt framing the assistant as a code interpreter; its final note
# must match the message format produced by BotBackend.add_file_message.
system_msg = '''You are an AI code interpreter.
Your goal is to help users do a variety of jobs by executing Python code.

You should:
1. Comprehend the user's requirements carefully & to the letter.
2. Give a brief description for what you plan to do & call the execute_code function to run code
3. Provide results analysis based on the execution output.
4. If error occurred, try to fix it.

Note: If the user uploads a file, you will receive a system message "User uploaded a file: filename". Use the filename as the path in the code. '''
39
+
40
+
41
def get_config():
    """Load and return the application configuration from ./config.json."""
    with open('config.json') as config_file:
        return json.load(config_file)
45
+
46
+
47
def config_openai_api(api_type, api_base, api_version, api_key):
    """Apply connection settings to the module-global openai client."""
    # The legacy openai (<1.0) SDK is configured through module attributes.
    openai.api_type = api_type
    openai.api_base = api_base
    openai.api_version = api_version
    openai.api_key = api_key
52
+
53
+
54
class GPTResponseLog:
    """Mutable bookkeeping for one in-progress streaming GPT response.

    Accumulates the assistant role name, streamed text content, the
    function-call name and argument fragments, the rendered code block,
    the finish reason, and a snapshot of the chat history.
    """

    def __init__(self):
        # A fresh instance is simply a fully-reset log.
        self.reset_gpt_response_log_values()

    def reset_gpt_response_log_values(self, exclude=None):
        """Restore every tracked attribute to its default value.

        :param exclude: optional list of attribute names to leave untouched.
        """
        defaults = {
            'assistant_role_name': '',
            'content': '',
            'function_name': None,
            'function_args_str': '',
            'display_code_block': '',
            'finish_reason': 'stop',
            'bot_history': None,
        }
        for name in (exclude or []):
            del defaults[name]
        for name, value in defaults.items():
            setattr(self, name, value)

    def set_assistant_role_name(self, assistant_role_name: str):
        """Record the role reported in the first response delta."""
        self.assistant_role_name = assistant_role_name

    def add_content(self, content: str):
        """Append one streamed text fragment to the accumulated content."""
        self.content += content

    def set_function_name(self, function_name: str):
        """Record the name of the function GPT wants to call."""
        self.function_name = function_name

    def copy_current_bot_history(self, bot_history: List):
        """Snapshot the chat history (deep copy, so later edits don't leak)."""
        self.bot_history = copy.deepcopy(bot_history)

    def add_function_args_str(self, function_args_str: str):
        """Append one streamed fragment of the function-call arguments."""
        self.function_args_str += function_args_str

    def update_display_code_block(self, display_code_block):
        """Replace the markdown code block currently shown in the UI."""
        self.display_code_block = display_code_block

    def update_finish_reason(self, finish_reason: str):
        """Record why the response stream ended (e.g. 'stop', 'function_call')."""
        self.finish_reason = finish_reason
101
+
102
+
103
class BotBackend(GPTResponseLog):
    """Per-session backend state: conversation, Jupyter kernel, API config.

    Inherits the streaming-response bookkeeping from GPTResponseLog.
    """

    def __init__(self):
        super().__init__()
        # NOTE(review): hash(id(self)) is only unique while the object is
        # alive — CPython may reuse ids after garbage collection.
        self.unique_id = hash(id(self))
        self.jupyter_work_dir = f'cache/work_dir_{self.unique_id}'
        self.jupyter_kernel = JupyterKernel(work_dir=self.jupyter_work_dir)
        self.gpt_model_choice = "GPT-3.5"
        # Uploaded files that can still be revoked (undone) by the user.
        self.revocable_files = []
        self._init_conversation()
        self._init_api_config()
        self._init_kwargs_for_chat_completion()

    def _init_conversation(self):
        """Reset the conversation to contain only the system prompt."""
        first_system_msg = {'role': 'system', 'content': system_msg}
        if hasattr(self, 'conversation'):
            # Mutate in place: kwargs_for_chat_completion holds a reference
            # to this exact list.
            self.conversation.clear()
            self.conversation.append(first_system_msg)
        else:
            self.conversation: List[Dict] = [first_system_msg]

    def _init_api_config(self):
        """Load config.json and configure the openai module.

        Falls back to the OPENAI_API_KEY environment variable when no key
        is present in the config file.
        """
        self.config = get_config()
        api_type = self.config['API_TYPE']
        api_base = self.config['API_base']
        api_version = self.config['API_VERSION']
        if self.config['API_KEY']:
            api_key = self.config['API_KEY']
        else:
            api_key = os.getenv('OPENAI_API_KEY')
        config_openai_api(api_type, api_base, api_version, api_key)

    def _init_kwargs_for_chat_completion(self):
        """Build the kwargs passed to openai.ChatCompletion.create."""
        self.kwargs_for_chat_completion = {
            'stream': True,
            'messages': self.conversation,
            'functions': functions,
            'function_call': 'auto'
        }

        model_name = self.config['model'][self.gpt_model_choice]['model_name']

        # Azure deployments are addressed via `engine`; OpenAI via `model`.
        if self.config['API_TYPE'] == 'azure':
            self.kwargs_for_chat_completion['engine'] = model_name
        else:
            self.kwargs_for_chat_completion['model'] = model_name

    def _clear_all_files_in_work_dir(self):
        """Delete every file in the kernel work directory.

        NOTE(review): assumes the work dir contains only regular files —
        os.remove raises on a subdirectory; confirm nothing creates dirs there.
        """
        for filename in os.listdir(self.jupyter_work_dir):
            os.remove(
                os.path.join(self.jupyter_work_dir, filename)
            )

    def add_gpt_response_content_message(self):
        """Append the accumulated assistant text to the conversation."""
        self.conversation.append(
            {'role': self.assistant_role_name, 'content': self.content}
        )

    def add_text_message(self, user_text):
        """Append a user message; prior uploads can no longer be revoked."""
        self.conversation.append(
            {'role': 'user', 'content': user_text}
        )
        self.revocable_files.clear()
        self.update_finish_reason(finish_reason='new_input')

    def add_file_message(self, path, bot_msg):
        """Copy an uploaded file into the work dir and tell GPT about it.

        :param path: source path of the uploaded file.
        :param bot_msg: the chat-history entry shown for this upload,
            stored so revoke_file can return it.
        """
        filename = os.path.basename(path)
        work_dir = self.jupyter_work_dir

        shutil.copy(path, work_dir)

        # BUG FIX: the f-string previously contained no placeholder, so GPT
        # was never told the actual filename (system_msg promises the format
        # "User uploaded a file: filename").
        gpt_msg = {'role': 'system', 'content': f'User uploaded a file: {filename}'}
        self.conversation.append(gpt_msg)
        self.revocable_files.append(
            {
                'bot_msg': bot_msg,
                'gpt_msg': gpt_msg,
                'path': os.path.join(work_dir, filename)
            }
        )

    def add_function_call_response_message(self, function_response: str, save_tokens=True):
        """Record the assistant's function call and its execution output.

        When save_tokens is True, outputs longer than 500 characters are
        truncated to the first and last 200 characters to bound token usage.
        """
        self.conversation.append(
            {
                "role": self.assistant_role_name,
                "name": self.function_name,
                "content": self.function_args_str
            }
        )

        if save_tokens and len(function_response) > 500:
            function_response = f'{function_response[:200]}\n[Output too much, the middle part output is omitted]\n ' \
                                f'End part of output:\n{function_response[-200:]}'
        self.conversation.append(
            {
                "role": "function",
                "name": self.function_name,
                "content": function_response,
            }
        )

    def revoke_file(self):
        """Undo the most recent file upload.

        Removes the upload's system message and the copied file.
        :return: the upload's bot_msg, or None when nothing is revocable.
        """
        if self.revocable_files:
            file = self.revocable_files[-1]
            bot_msg = file['bot_msg']
            gpt_msg = file['gpt_msg']
            path = file['path']

            # The upload message must still be the last conversation entry;
            # add_text_message clears revocable_files, guaranteeing this.
            assert self.conversation[-1] is gpt_msg
            del self.conversation[-1]

            os.remove(path)

            del self.revocable_files[-1]

            return bot_msg
        else:
            return None

    def update_gpt_model_choice(self, model_choice):
        """Switch the active model and rebuild the ChatCompletion kwargs."""
        self.gpt_model_choice = model_choice
        self._init_kwargs_for_chat_completion()

    def restart(self):
        """Reset the session: work-dir files, conversation, logs, kernel."""
        self._clear_all_files_in_work_dir()
        self.revocable_files.clear()
        self._init_conversation()
        self.reset_gpt_response_log_values()
        self.jupyter_kernel.restart_jupyter_kernel()
config.json.example ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "API_TYPE": "open_ai",
3
+ "API_base": "https://api.chatanywhere.cn/v1",
4
+ "API_VERSION": null,
5
+ "API_KEY": "",
6
+ "model": {
7
+ "GPT-3.5": {
8
+ "model_name": "gpt-3.5-turbo",
9
+ "available": true
10
+ },
11
+ "GPT-4": {
12
+ "model_name": "",
13
+ "available": false
14
+ }
15
+ }
16
+ }
functional.py ADDED
@@ -0,0 +1,116 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from bot_backend import *
2
+ import base64
3
+ import time
4
+
5
+
6
def chat_completion(bot_backend: BotBackend):
    """Send the streaming chat-completion request prepared on `bot_backend`.

    :return: the streaming response iterator from openai.ChatCompletion.create.
    """
    model_choice = bot_backend.gpt_model_choice
    config = bot_backend.config

    # Guard: the selected model must be enabled in config.json.
    assert config['model'][model_choice]['available'], f"{model_choice} is not available for you API key"

    return openai.ChatCompletion.create(**bot_backend.kwargs_for_chat_completion)
15
+
16
+
17
def add_function_response_to_bot_history(content_to_display, history, unique_id):
    """Append code-execution output (text and images) to the chat history.

    :param content_to_display: list of (mark, payload) pairs from JupyterKernel.
    :param history: gradio-style chat history; mutated in place.
    :param unique_id: session id used to name the temp image directory.
    """
    text_parts = []
    image_items = []
    error_occurred = False

    # Split the execution output into plain text and base64-encoded images.
    for mark, out_str in content_to_display:
        if mark in ('stdout', 'execute_result_text', 'display_text'):
            text_parts.append(out_str)
        elif mark in ('execute_result_png', 'display_png'):
            image_items.append(('png', out_str))
        elif mark in ('execute_result_jpeg', 'display_jpeg'):
            image_items.append(('jpg', out_str))
        elif mark == 'error':
            text_parts.append(delete_color_control_char(out_str))
            error_occurred = True

    text = '\n'.join(text_parts).strip('\n')
    # NOTE: the error variant intentionally renders with an extra blank line.
    if error_occurred:
        history.append([None, f'❌Terminal output:\n```shell\n\n{text}\n```'])
    else:
        history.append([None, f'✔️Terminal output:\n```shell\n{text}\n```'])

    # Write each image to a per-session temp dir and embed it in the history.
    temp_path = f'cache/temp_{unique_id}'
    for filetype, img in image_items:
        image_bytes = base64.b64decode(img)
        if not os.path.exists(temp_path):
            os.mkdir(temp_path)
        path = f'{temp_path}/{hash(time.time())}.{filetype}'
        with open(path, 'wb') as f:
            f.write(image_bytes)
        history.append(
            [
                None,
                f'<img src="file={path}" style=\'width: 600px; max-width:none; max-height:none\'>'
            ]
        )
54
+
55
+
56
def parse_json(function_args: str, finished: bool):
    """Extract the `code` value from a (possibly non-standard) JSON string.

    GPT may generate non-standard JSON containing raw '\n' characters inside
    string values, which makes `json.loads()` fail. This parser locates the
    code value directly in the argument text instead.

    :param function_args: accumulated function-call arguments; may be an
        incomplete prefix of the final JSON while streaming.
    :param finished: True when the argument string is complete.
    :return: the code string if successfully parsed, otherwise None.
    """
    parser_log = {
        'met_begin_{': False,
        'begin_"code"': False,
        'end_"code"': False,
        'met_:': False,
        'met_end_}': False,
        'met_end_code_"': False,
        "code_begin_index": 0,
        "code_end_index": 0
    }
    try:
        # Forward scan: find the quote that opens the code value.
        for index, char in enumerate(function_args):
            if char == '{':
                parser_log['met_begin_{'] = True
            elif parser_log['met_begin_{'] and char == '"':
                if parser_log['met_:']:
                    # This quote opens the code value itself.
                    if finished:
                        parser_log['code_begin_index'] = index + 1
                        break
                    else:
                        if index + 1 == len(function_args):
                            # Nothing streamed after the opening quote yet.
                            return ''
                        else:
                            temp_code_str = function_args[index + 1:]
                            if '\n' in temp_code_str:
                                # Raw newline: standard JSON would reject it,
                                # so return the text directly.
                                return temp_code_str.strip('\n')
                            else:
                                # Close the string and parse as standard JSON.
                                return json.loads(function_args + '"}')['code']
                elif parser_log['begin_"code"']:
                    parser_log['end_"code"'] = True
                else:
                    parser_log['begin_"code"'] = True
            elif parser_log['end_"code"'] and char == ':':
                parser_log['met_:'] = True
            else:
                continue
        if finished:
            # Backward scan: find the quote that closes the code value,
            # just before the final '}'.
            for index, char in enumerate(function_args[::-1]):
                back_index = -1 - index
                if char == '}':
                    parser_log['met_end_}'] = True
                elif parser_log['met_end_}'] and char == '"':
                    parser_log['code_end_index'] = back_index - 1
                    break
                else:
                    continue
            code_str = function_args[parser_log['code_begin_index']: parser_log['code_end_index'] + 1]
            if '\n' in code_str:
                return code_str.strip('\n')
            else:
                return json.loads(function_args)['code']

    except Exception:
        # Deliberate best-effort: any malformed input yields None.
        # (Fixed: previously bound the exception to an unused variable `e`.)
        return None
jupyter_backend.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import jupyter_client
2
+ import re
3
+
4
+
5
def delete_color_control_char(string):
    """Strip ANSI color/control escape sequences (CSI codes) from `string`."""
    # Matches both the single-byte CSI (0x9B) and the ESC-[ two-byte form.
    return re.sub(r'(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~]', '', string)
8
+
9
+
10
class JupyterKernel:
    """Wraps a local python3 Jupyter kernel that persists between executions."""

    # MIME types collected from execute_result / display_data messages,
    # paired with the suffix used to build the output mark.
    _MIME_SUFFIXES = (
        ('text/plain', 'text'),
        ('text/html', 'html'),
        ('image/png', 'png'),
        ('image/jpeg', 'jpeg'),
    )

    def __init__(self, work_dir):
        self.kernel_manager, self.kernel_client = jupyter_client.manager.start_new_kernel(kernel_name='python3')
        self.work_dir = work_dir
        self._create_work_dir()
        # Both names map to the same implementation: GPT sometimes
        # hallucinates a function literally named `python`.
        self.available_functions = {
            'execute_code': self.execute_code,
            'python': self.execute_code
        }

    def execute_code_(self, code):
        """Run `code` in the kernel and collect raw iopub output.

        :return: list of (mark, payload) pairs, where mark is one of
            'stdout', 'execute_result_*', 'display_*', or 'error'.
        """
        self.kernel_client.execute(code)

        all_output = []
        msg = self.kernel_client.get_iopub_msg()
        while True:
            msg_type = msg['msg_type']
            content = msg['content']

            # The kernel reports idle when execution has fully finished.
            if msg_type == 'status' and content.get('execution_state') == 'idle':
                break

            if msg_type == 'stream' and content.get('name') == 'stdout':
                all_output.append(('stdout', content['text']))
            elif msg_type in ('execute_result', 'display_data'):
                prefix = 'execute_result_' if msg_type == 'execute_result' else 'display_'
                data = content.get('data', {})
                # Collect every representation the kernel offered.
                for mime, suffix in self._MIME_SUFFIXES:
                    if mime in data:
                        all_output.append((prefix + suffix, data[mime]))
            elif msg_type == 'error' and 'traceback' in content:
                all_output.append(('error', '\n'.join(content['traceback'])))

            msg = self.kernel_client.get_iopub_msg()

        return all_output

    def execute_code(self, code):
        """Execute code and summarize its output for GPT.

        :return: (text summary for GPT, raw content for UI display).
        """
        content_to_display = self.execute_code_(code)
        text_to_gpt = []
        for mark, out_str in content_to_display:
            if mark in ('stdout', 'execute_result_text', 'display_text'):
                text_to_gpt.append(out_str)
            elif mark in ('execute_result_png', 'execute_result_jpeg', 'display_png', 'display_jpeg'):
                # Images are opaque to GPT; substitute a placeholder token.
                text_to_gpt.append('[image]')
            elif mark == 'error':
                text_to_gpt.append(delete_color_control_char(out_str))

        return '\n'.join(text_to_gpt), content_to_display

    def _create_work_dir(self):
        """Create the working directory inside the kernel and chdir into it."""
        init_code = (
            f"import os\n"
            f"if not os.path.exists('{self.work_dir}'):\n"
            f"    os.mkdir('{self.work_dir}')\n"
            f"os.chdir('{self.work_dir}')\n"
            f"del os"
        )
        self.execute_code_(init_code)

    def restart_jupyter_kernel(self):
        """Shut down the kernel, start a fresh one, and re-enter the work dir."""
        self.kernel_client.shutdown()
        self.kernel_manager, self.kernel_client = jupyter_client.manager.start_new_kernel(kernel_name='python3')
        self._create_work_dir()
requirements.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ notebook==6.5.4
2
+ gradio==3.39.0
3
+ openai==0.27.8
4
+ pandas
5
+ numpy
6
+ openpyxl
7
+ Pillow
8
+ opencv-python
9
+ PyPDF2
10
+ pdfminer.six
response_parser.py ADDED
@@ -0,0 +1,200 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from abc import ABCMeta, abstractmethod
2
+ from functional import *
3
+
4
+
5
class ChoiceStrategy(metaclass=ABCMeta):
    """Base class for handlers of one streamed response `choice` delta."""

    def __init__(self, choice):
        self.choice = choice
        self.delta = choice['delta']

    @abstractmethod
    def support(self):
        """Return True when this strategy applies to the current delta."""

    @abstractmethod
    def execute(self, bot_backend: BotBackend, history: List, whether_exit: bool):
        """Process the delta; return the updated (history, whether_exit)."""
17
+
18
+
19
class RoleChoiceStrategy(ChoiceStrategy):
    """Handles the first delta of a stream, which carries the assistant role."""

    def support(self):
        return 'role' in self.delta

    def execute(self, bot_backend: BotBackend, history: List, whether_exit: bool):
        # Record the role so later conversation entries are attributed to it.
        bot_backend.set_assistant_role_name(assistant_role_name=self.delta['role'])
        return history, whether_exit
27
+
28
+
29
class ContentChoiceStrategy(ChoiceStrategy):
    """Accumulates streamed text and mirrors it into the visible chat history."""

    def support(self):
        # `content` is present but null while a function call streams:
        # {
        #     "role": "assistant",
        #     "content": null,
        #     "function_call": {"name": "python", "arguments": ""}
        # }
        # so both the presence and the non-null checks are required.
        return 'content' in self.delta and self.delta['content'] is not None

    def execute(self, bot_backend: BotBackend, history: List, whether_exit: bool):
        bot_backend.add_content(content=self.delta.get('content', ''))
        # Show the text accumulated so far in the latest history entry.
        history[-1][1] = bot_backend.content
        return history, whether_exit
46
+
47
+
48
class NameFunctionCallChoiceStrategy(ChoiceStrategy):
    """Handles the delta announcing which function GPT wants to call."""

    def support(self):
        return 'function_call' in self.delta and 'name' in self.delta['function_call']

    def execute(self, bot_backend: BotBackend, history: List, whether_exit: bool):
        bot_backend.set_function_name(function_name=self.delta['function_call']['name'])
        # Snapshot the history: later code-block rendering rebuilds from it.
        bot_backend.copy_current_bot_history(bot_history=history)

        known_functions = bot_backend.jupyter_kernel.available_functions
        if bot_backend.function_name not in known_functions:
            # Unknown function: surface the problem in chat and stop the loop.
            history.append(
                [
                    None,
                    f'GPT attempted to call a function that does '
                    f'not exist: {bot_backend.function_name}\n '
                ]
            )
            whether_exit = True

        return history, whether_exit
67
+
68
+
69
class ArgumentsFunctionCallChoiceStrategy(ChoiceStrategy):
    """Accumulates streamed function arguments and live-renders the code."""

    def support(self):
        return 'function_call' in self.delta and 'arguments' in self.delta['function_call']

    def execute(self, bot_backend: BotBackend, history: List, whether_exit: bool):
        bot_backend.add_function_args_str(function_args_str=self.delta['function_call']['arguments'])

        if bot_backend.function_name == 'python':
            # Hallucinatory function call: GPT (especially GPT-3.5) sometimes
            # invokes a non-existent `python` function whose arguments are raw
            # code text rather than JSON.
            code_so_far = bot_backend.function_args_str
        else:
            # Normal path: pull the partial code out of the (possibly
            # incomplete) JSON argument string; None means "not yet parseable".
            code_so_far = parse_json(function_args=bot_backend.function_args_str, finished=False)

        if code_so_far is not None:
            bot_backend.update_display_code_block(
                display_code_block="\n🔴Working:\n```python\n{}\n```".format(code_so_far)
            )
            # Re-render from the snapshot so the code block is not duplicated.
            history = copy.deepcopy(bot_backend.bot_history)
            history[-1][1] += bot_backend.display_code_block

        return history, whether_exit
101
+
102
+
103
class FinishReasonChoiceStrategy(ChoiceStrategy):
    """Handles the final delta of a response (finish_reason is set).

    On 'function_call' it extracts the generated code, runs it in the
    Jupyter kernel, and appends both the function-call messages and the
    execution output to the conversation and chat history.
    """

    def support(self):
        return self.choice['finish_reason'] is not None

    def execute(self, bot_backend: BotBackend, history: List, whether_exit: bool):
        function_dict = bot_backend.jupyter_kernel.available_functions

        if bot_backend.content:
            # Persist any streamed assistant text before handling the call.
            bot_backend.add_gpt_response_content_message()

        bot_backend.update_finish_reason(finish_reason=self.choice['finish_reason'])
        if bot_backend.finish_reason == 'function_call':
            try:

                code_str = self.get_code_str(bot_backend)

                # Switch the UI marker from "working" (red) to "done" (green).
                bot_backend.update_display_code_block(
                    display_code_block="\n🟢Working:\n```python\n{}\n```".format(code_str)
                )
                history = copy.deepcopy(bot_backend.bot_history)
                history[-1][1] += bot_backend.display_code_block

                # Execute the code in the Jupyter kernel.
                text_to_gpt, content_to_display = function_dict[
                    bot_backend.function_name
                ](code_str)

                # Record the call and its (possibly truncated) output.
                bot_backend.add_function_call_response_message(function_response=text_to_gpt, save_tokens=True)

                add_function_response_to_bot_history(
                    content_to_display=content_to_display, history=history, unique_id=bot_backend.unique_id
                )

            except json.JSONDecodeError:
                history.append(
                    [None, f"GPT generate wrong function args: {bot_backend.function_args_str}"]
                )
                whether_exit = True
                return history, whether_exit

            except Exception as e:
                history.append([None, f'Backend error: {e}'])
                whether_exit = True
                return history, whether_exit

        bot_backend.reset_gpt_response_log_values(exclude=['finish_reason'])

        return history, whether_exit

    @staticmethod
    def get_code_str(bot_backend):
        """Extract the code to execute from the accumulated arguments.

        :raises json.JSONDecodeError: when the arguments cannot be parsed.
        """
        if bot_backend.function_name == 'python':
            # Hallucinated `python` call: arguments are already raw code.
            code_str = bot_backend.function_args_str
        else:
            code_str = parse_json(function_args=bot_backend.function_args_str, finished=True)
            if code_str is None:
                # BUG FIX: JSONDecodeError must be instantiated with
                # (msg, doc, pos). A bare `raise json.JSONDecodeError` raised
                # TypeError instead, which was caught by the broad
                # `except Exception` handler rather than the intended
                # `except json.JSONDecodeError` branch above.
                raise json.JSONDecodeError(
                    'failed to parse code from function arguments',
                    bot_backend.function_args_str, 0
                )
        return code_str
162
+
163
+
164
class ChoiceHandler:
    """Dispatches one streamed `choice` to every strategy that supports it."""

    # Order matters: role → content → function name → args → finish reason,
    # mirroring the order fields appear in the stream.
    strategies = [
        RoleChoiceStrategy, ContentChoiceStrategy, NameFunctionCallChoiceStrategy,
        ArgumentsFunctionCallChoiceStrategy, FinishReasonChoiceStrategy
    ]

    def __init__(self, choice):
        self.choice = choice

    def handle(self, bot_backend: BotBackend, history: List, whether_exit: bool):
        """Run each applicable strategy in order; return the updated state."""
        for strategy_cls in self.strategies:
            handler = strategy_cls(choice=self.choice)
            if handler.support():
                history, whether_exit = handler.execute(
                    bot_backend=bot_backend,
                    history=history,
                    whether_exit=whether_exit,
                )
        return history, whether_exit
184
+
185
+
186
def parse_response(chunk, history, bot_backend: BotBackend):
    """Feed one streamed response chunk through the choice handlers.

    :return: history, whether_exit
    """
    whether_exit = False
    choices = chunk['choices']
    if choices:
        # Streaming responses carry at most one choice per chunk.
        handler = ChoiceHandler(choice=choices[0])
        history, whether_exit = handler.handle(
            history=history,
            bot_backend=bot_backend,
            whether_exit=whether_exit,
        )

    return history, whether_exit