AnaviJoshi commited on
Commit
907704a
·
verified ·
1 Parent(s): e7cfe0d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -10
app.py CHANGED
@@ -58,23 +58,44 @@ def get_top_chunks(query): # store a function that gets the most relevant_info a
58
  client = InferenceClient("google/gemma-3-27b-it")
59
 
60
 
61
def respond(message, history):
    """Stream a pet-themed chat reply, yielding the growing response text.

    Parameters:
        message: the user's latest input string.
        history: prior chat turns as a list of {"role", "content"} dicts,
            or falsy when the conversation is new.

    Yields:
        The accumulated response string after each streamed token.
    """
    system_message = "You are a helpful chatbot named scooby, kinda like the cartoon character but not too much like it, who knows alot about pets and their diets and loves helping pet owners, and can only answer questions about pets"
    messages = [{"role": "system", "content": system_message}]

    # Replay earlier turns so the model keeps conversational context.
    if history:
        messages.extend(history)

    messages.append({"role": "user", "content": message})

    response = ""
    # Fix 1: the original loop reused the name `messages` for each stream
    # chunk, shadowing the request list — renamed to `chunk` for clarity.
    # Fix 2: a streamed delta's `content` can be None (e.g. the final or
    # role-only chunk), which made `response += token` raise TypeError.
    for chunk in client.chat_completion(messages, max_tokens=2500, stream=True):
        token = chunk.choices[0].delta.content
        if token:
            response += token
        yield response
77
 
 
 
78
  #theme = gr.themes.Origin(primary_hue="orange",secondary_hue="indigo", neutral_hue="teal")
79
 
80
 
 
58
  client = InferenceClient("google/gemma-3-27b-it")
59
 
60
 
61
+ #def respond(message, history):
62
+ #
63
+ # system_message = "You are a helpful chatbot named scooby, kinda like the cartoon character but not too much like it, who knows alot about pets and their diets and loves helping pet owners, and can only answer questions about pets"
64
+ # messages = [{"role": "system", "content": system_message}]
65
 
66
 
67
 
68
+ # if history:
69
+ # messages.extend(history)
70
 
71
+ # messages.append({"role": "user", "content": message})
72
+ # response = ""
73
+ # for messages in client.chat_completion(messages,max_tokens = 2500, stream = True):
74
+ # token = messages.choices[0].delta.content
75
+ # response += token
76
+ # yield response
77
+
78
def respond(message, history):
    """Answer a pet question using retrieved context, streaming the reply.

    Parameters:
        message: the user's question.
        history: prior chat turns; accepted for the Gradio chat interface
            signature but not currently folded into the prompt.

    Yields:
        The accumulated response string as tokens stream in.
    """
    # Get relevant information from the knowledge base
    relevant_chunks = get_top_chunks(message)
    context = "\n".join(relevant_chunks)

    # Build prompt
    prompt = (
        "You are Scooby, a helpful chatbot that only answers questions about pets and their diets.\n"
        "Use the following relevant information to help answer the user's question:\n\n"
        f"{context}\n\n"
        f"User: {message}\nScooby:"
    )

    # Stream response
    response = ""
    # Fix: with stream=True (and details not requested),
    # InferenceClient.text_generation yields plain strings, so the original
    # `chunk.token.text` would raise AttributeError on the first token.
    for chunk in client.text_generation(prompt=prompt, max_new_tokens=300, stream=True):
        response += chunk
        yield response
96
 
97
+
98
+
99
  #theme = gr.themes.Origin(primary_hue="orange",secondary_hue="indigo", neutral_hue="teal")
100
 
101