consy committed on
Commit
6729300
·
verified ·
1 Parent(s): cb961af

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -9
app.py CHANGED
@@ -117,24 +117,46 @@ top_results = get_top_chunks('What causes skin cancer?',chunk_embeddings, cleane
117
  print(top_results)# Print the top results
118
 
119
  #the og code from gen ai lesson
120
- client = InferenceClient("microsoft/phi-4")
121
  # name of llm chatbot accessed ^^ or can use ' microsoft/phi-4 that's connected to the microsoft phi gen model
122
 
123
- def respond(message,history):
124
 
125
- info = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
126
- messages = [{'role': 'system','content':f'You are a friendly chatbot using {info} to answer questions. You are always willing to help and want the best for the user. You need to emphasize that you are not a medical professional at the end of the message, but you are here to help to the best of your ability. Be confident and comforting to the users when helping them. In your response add suggestions for acouple follow up questions to further the conversation with the chatbot.'}]
127
  #use string interpolation with variable info
128
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
129
  if history:
130
  messages.extend(history)
 
131
 
132
- messages.append({'role': 'user','content': message})
133
-
134
- response = client.chat_completion(messages, max_tokens = 500, top_p=0.8)
135
- #max tokens is a parameter to determine how long the message should be
 
 
 
 
136
 
137
- return response['choices'][0]['message']['content'].strip()
138
 
139
  with gr.Blocks(theme= theme) as chatbot:
140
  with gr.Row(scale = 1):
 
117
  print(top_results)# Print the top results
118
 
119
  #the og code from gen ai lesson
120
+ #client = InferenceClient("microsoft/phi-4")
121
  # name of llm chatbot accessed ^^ or can use ' microsoft/phi-4 that's connected to the microsoft phi gen model
122
 
123
+ #def respond(message,history):
124
 
125
+ # info = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
126
+ # messages = [{'role': 'system','content':f'You are a friendly chatbot using {info} to answer questions. You are always willing to help and want the best for the user. You need to emphasize that you are not a medical professional at the end of the message, but you are here to help to the best of your ability. Be confident and comforting to the users when helping them. In your response add suggestions for acouple follow up questions to further the conversation with the chatbot.'}]
127
  #use string interpolation with variable info
128
 
129
+ # if history:
130
+ # messages.extend(history)
131
+ #
132
+ # messages.append({'role': 'user','content': message})
133
+
134
+ # response = client.chat_completion(messages, max_tokens = 500, top_p=0.8)
135
+ #max tokens is a parameter to determine how long the message should be
136
+
137
+ # return response['choices'][0]['message']['content'].strip()
138
+
139
from huggingface_hub import InferenceClient

# Create the client without binding a model here; the model is selected
# per-request in chat.completions.create below.
client = InferenceClient()

def respond(message, history):
    """Generate a chatbot reply grounded in the most relevant document chunks.

    Args:
        message: The user's latest message (str).
        history: Prior conversation turns as a list of
            {'role': ..., 'content': ...} dicts (may be empty/None).

    Returns:
        The assistant's reply text, stripped of surrounding whitespace.
    """
    # Retrieve the chunks most relevant to this question so the model can
    # ground its answer in them (string-interpolated into the system prompt).
    info = get_top_chunks(message, chunk_embeddings, cleaned_chunks)

    # Full system prompt restored — the rewrite had truncated it to "...",
    # which dropped the persona, the medical disclaimer, and the
    # follow-up-question instructions.
    messages = [{'role': 'system', 'content': f'You are a friendly chatbot using {info} to answer questions. You are always willing to help and want the best for the user. You need to emphasize that you are not a medical professional at the end of the message, but you are here to help to the best of your ability. Be confident and comforting to the users when helping them. In your response add suggestions for acouple follow up questions to further the conversation with the chatbot.'}]

    # Replay prior turns so the model keeps conversational context.
    if history:
        messages.extend(history)
    messages.append({'role': 'user', 'content': message})

    # OpenAI-compatible chat completions API of huggingface_hub;
    # max_tokens bounds the reply length.
    response = client.chat.completions.create(
        model="microsoft/phi-4",
        messages=messages,
        max_tokens=500,
        top_p=0.8
    )
    return response.choices[0].message.content.strip()
159
 
 
160
 
161
  with gr.Blocks(theme= theme) as chatbot:
162
  with gr.Row(scale = 1):