Letsch22 committed on
Commit
a724f18
·
1 Parent(s): 1c02a04

Better assistant caching

Browse files
Files changed (1) hide show
  1. app.py +65 -54
app.py CHANGED
@@ -1,7 +1,8 @@
1
  import os
2
  import urllib
3
- from time import sleep
4
  import urllib.request
 
 
5
 
6
  import gradio as gr
7
  import openai
@@ -11,70 +12,32 @@ load_dotenv()
11
 
12
  class MockInterviewer:
13
 
14
- def __init__(self):
15
- self.client = openai.OpenAI(api_key=os.environ['OPENAI_API_KEY'])
16
- self.job_role = ''
17
- self.company = ''
18
- self.assistant_id = ''
19
-
20
- def create_files(self, company):
21
- if company.lower() == 'amazon':
22
- url = 'https://www.aboutamazon.com/about-us/leadership-principles'
23
- filename = 'leadership_principles.html'
24
- else:
25
- return []
26
-
27
- filename, headers = urllib.request.urlretrieve(url, filename)
28
- with open(filename, 'rb') as file:
29
- assistant_file = self.client.files.create(file=file, purpose='assistants')
30
- file_ids = [assistant_file.id]
31
- os.remove(filename)
32
- return file_ids
33
-
34
- def init_assistant(self, job_role, company):
35
- if not job_role and not company:
36
- raise gr.Error('Job Role and Company are required fields.')
37
- if not job_role:
38
- raise gr.Error('Job Role is a required field.')
39
- if not company:
40
- raise gr.Error('Company is a required field.')
41
-
42
- if job_role != self.job_role or company != self.company:
43
- file_ids = self.create_files(company)
44
 
45
- assistant = self.client.beta.assistants.create(
46
- name='Mock Interviewer',
47
- instructions=f'You are an AI mock interviewer for {job_role} roles at {company}. Please make it obvious this is your purpose. If you have been provided a file, use it as an interview guide.',
48
- model='gpt-4-0125-preview',
49
- tools=[
50
- {
51
- 'type': 'retrieval' # This adds the knowledge base as a tool
52
- }
53
- ],
54
- file_ids=file_ids)
55
-
56
- self.assistant_id = assistant.id
57
-
58
- def chat(self, usr_message, history, job_role, company):
59
  print('Started function')
60
- thread = self.client.beta.threads.create()
61
- user_input = usr_message['text']
 
 
62
 
63
- self.init_assistant(job_role, company)
64
 
65
  # Add the user's message to the thread
66
- self.client.beta.threads.messages.create(thread_id=thread.id,
67
  role="user",
68
  content=user_input)
69
  print('Client made')
70
  # Run the Assistant
71
- run = self.client.beta.threads.runs.create(thread_id=thread.id,
72
- assistant_id=self.assistant_id)
73
  print('Run created')
74
 
75
  # Check if the Run requires action (function call)
76
  while True:
77
- run_status = self.client.beta.threads.runs.retrieve(thread_id=thread.id,
78
  run_id=run.id)
79
  print(f"Run status: {run_status.status}")
80
  if run_status.status == 'completed':
@@ -83,12 +46,60 @@ class MockInterviewer:
83
  sleep(1) # Wait for a second before checking again
84
 
85
  # Retrieve and return the latest message from the assistant
86
- messages = self.client.beta.threads.messages.list(thread_id=thread.id)
87
  response = messages.data[0].content[0].text.value
88
 
89
  print(f"Assistant response: {response}") # Debugging line
90
  #return json.dumps({"response": response})
91
- yield response
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
92
 
93
  # Creating the Gradio interface
94
  with gr.Blocks() as demo:
 
1
  import os
2
  import urllib
 
3
  import urllib.request
4
+ from time import sleep
5
+ from typing import Dict, List, Generator
6
 
7
  import gradio as gr
8
  import openai
 
12
 
13
  class MockInterviewer:
14
 
15
+ def __init__(self) -> None:
16
+ self._client = openai.OpenAI(api_key=os.environ['OPENAI_API_KEY'])
17
+ self._assistant_id_cache: Dict[str, str] = {}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
+ def chat(self, usr_message: Dict, history: List[List], job_role: str, company: str) -> Generator:
 
 
 
 
 
 
 
 
 
 
 
 
 
20
  print('Started function')
21
+ self._validate_fields(job_role, company)
22
+
23
+ thread = self._client.beta.threads.create()
24
+ user_input = usr_message.get('text')
25
 
26
+ assistant_id = self._init_assistant(job_role, company)
27
 
28
  # Add the user's message to the thread
29
+ self._client.beta.threads.messages.create(thread_id=thread.id,
30
  role="user",
31
  content=user_input)
32
  print('Client made')
33
  # Run the Assistant
34
+ run = self._client.beta.threads.runs.create(thread_id=thread.id,
35
+ assistant_id=assistant_id)
36
  print('Run created')
37
 
38
  # Check if the Run requires action (function call)
39
  while True:
40
+ run_status = self._client.beta.threads.runs.retrieve(thread_id=thread.id,
41
  run_id=run.id)
42
  print(f"Run status: {run_status.status}")
43
  if run_status.status == 'completed':
 
46
  sleep(1) # Wait for a second before checking again
47
 
48
  # Retrieve and return the latest message from the assistant
49
+ messages = self._client.beta.threads.messages.list(thread_id=thread.id)
50
  response = messages.data[0].content[0].text.value
51
 
52
  print(f"Assistant response: {response}") # Debugging line
53
  #return json.dumps({"response": response})
54
+ yield response
55
+
56
+ def _validate_fields(self, job_role: str, company: str) -> None:
57
+ if not job_role and not company:
58
+ raise gr.Error('Job Role and Company are required fields.')
59
+ if not job_role:
60
+ raise gr.Error('Job Role is a required field.')
61
+ if not company:
62
+ raise gr.Error('Company is a required field.')
63
+
64
+ def _create_files(self, company: str) -> List[str]:
65
+ if company.lower() == 'amazon':
66
+ url = 'https://www.aboutamazon.com/about-us/leadership-principles'
67
+ filename = 'leadership_principles.html'
68
+ else:
69
+ return []
70
+
71
+ filename, headers = urllib.request.urlretrieve(url, filename)
72
+ with open(filename, 'rb') as file:
73
+ assistant_file = self._client.files.create(file=file, purpose='assistants')
74
+ file_ids = [assistant_file.id]
75
+ os.remove(filename)
76
+ return file_ids
77
+
78
+ def _init_assistant(self, job_role: str, company: str) -> str:
79
+ cache_key = self._create_cache_key(job_role, company)
80
+ if cache_key in self._assistant_id_cache:
81
+ print(f'Fetched from cache for key {cache_key}')
82
+ return self._assistant_id_cache.get(cache_key)
83
+ else:
84
+ print(f'Initializing new assistant for key {cache_key}')
85
+ file_ids = self._create_files(company)
86
+
87
+ assistant = self._client.beta.assistants.create(
88
+ name='Mock Interviewer',
89
+ instructions=f'You are an AI mock interviewer for {job_role} roles at {company}. Please make it obvious this is your purpose. If you have been provided a file, use it as an interview guide.',
90
+ model='gpt-4-0125-preview',
91
+ tools=[
92
+ {
93
+ 'type': 'retrieval' # This adds the knowledge base as a tool
94
+ }
95
+ ],
96
+ file_ids=file_ids)
97
+
98
+ self._assistant_id_cache[cache_key] = assistant.id
99
+ return assistant.id
100
+
101
+ def _create_cache_key(self, job_role: str, company: str) -> str:
102
+ return f'{job_role.lower()}+{company.lower()}'
103
 
104
  # Creating the Gradio interface
105
  with gr.Blocks() as demo: