consy committed on
Commit
7d47e29
·
verified ·
1 Parent(s): 1fdb6cd

added more comments

Browse files
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -5,7 +5,7 @@ import random
5
  from huggingface_hub import InferenceClient
6
 
7
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
8
- # name of llm chatbot accessed ^^
9
 
10
  def respond(message,history):
11
 
@@ -18,6 +18,7 @@ def respond(message,history):
18
  messages.append({'role': 'user','content': message})
19
 
20
  response = client.chat_completion(messages, max_tokens = 100)
 
21
 
22
  return response['choices'][0]['message']['content'].strip()
23
 
 
5
  from huggingface_hub import InferenceClient
6
 
7
  client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
8
+ # name of llm chatbot accessed ^^ — or use 'microsoft/phi-4', which connects to Microsoft's Phi generation model
9
 
10
  def respond(message,history):
11
 
 
18
  messages.append({'role': 'user','content': message})
19
 
20
  response = client.chat_completion(messages, max_tokens = 100)
21
+ # max_tokens limits how long the generated reply can be
22
 
23
  return response['choices'][0]['message']['content'].strip()
24