# ScoobyAI_KWK / app.py
# (Hugging Face Space page header captured with the file:
#  AnaviJoshi's picture / "Update app.py" / commit 4adb088 verified —
#  scrape residue, not Python code; kept here as a comment.)
import gradio as gr
from sentence_transformers import SentenceTransformer
import torch
from huggingface_hub import InferenceClient
#import os
#HF_TOKEN = os.environ.get("HF_TOKEN")
# Load the knowledge base and split it into one-sentence chunks, then
# embed every chunk once up front so each query only needs a single
# encode call at request time.
with open("knowledge.txt", "r", encoding="utf-8") as file:
    knowledge = file.read()

cleaned_text = knowledge.strip()
chunks = cleaned_text.split("\n")

# Keep only the non-empty lines, with surrounding whitespace removed.
cleaned_chunks = [line.strip() for line in chunks if line.strip()]

# Sentence encoder used for both the chunks and the incoming queries.
model = SentenceTransformer('all-MiniLM-L6-v2')
# Vector embeddings (torch tensors, one row per chunk) for similarity search.
chunk_embeddings = model.encode(cleaned_chunks, convert_to_tensor=True)
def get_top_chunks(query, k=3):
    """Return the knowledge-base chunks most similar to *query*.

    Args:
        query: Free-text question to match against the knowledge base.
        k: Maximum number of chunks to return (default 3, as before).

    Returns:
        list[str]: Entries from ``cleaned_chunks`` ordered from most to
        least similar to the query.
    """
    # Embed the query so it lives in the same vector space as the chunks.
    query_embedding = model.encode(query, convert_to_tensor=True)
    # Normalize both sides so the matmul below computes cosine similarity.
    query_embedding_normalized = query_embedding / query_embedding.norm()
    chunk_embeddings_normalized = chunk_embeddings / chunk_embeddings.norm(dim=1, keepdim=True)
    similarities = torch.matmul(chunk_embeddings_normalized, query_embedding_normalized)
    # Clamp k so topk never requests more results than there are chunks.
    top_indices = torch.topk(similarities, k=min(k, len(cleaned_chunks))).indices
    # BUG FIX: index into cleaned_chunks — the list that was actually
    # embedded. The original indexed `chunks`, which still contains empty
    # lines, so its positions are misaligned with the embedding rows.
    return [cleaned_chunks[i] for i in top_indices]
# Chat-completion client backed by the hosted Gemma 3 27B instruct model;
# no token is passed explicitly here (see the commented HF_TOKEN lines above).
client = InferenceClient("google/gemma-3-27b-it")
def respond(message, history):
    """Stream a Scooby-persona reply grounded in retrieved knowledge chunks.

    Args:
        message: The user's latest question.
        history: Prior chat turns as {"role", "content"} dicts (may be
            None/empty on the first turn).

    Yields:
        str: The partial response text, growing as tokens stream in.
    """
    # Retrieve the chunks most relevant to this question and fold them
    # into the system prompt.
    knowledge_base = get_top_chunks(message)
    system_message = f"You are a helpful chatbot named scooby, kinda like the cartoon character but not too much like it and loves helping pet owner, and doesn't talk about specific pet companies or health issues until prompted:{knowledge_base}"
    messages = [{"role": "system", "content": system_message}]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = ""
    # BUG FIX: the original reused the name `messages` as the stream loop
    # variable, shadowing the request list; use a distinct name.
    for chunk in client.chat_completion(messages, max_tokens=300, stream=True):
        token = chunk.choices[0].delta.content
        # Streamed deltas can be None (e.g. the final stop chunk); skip
        # them instead of raising TypeError on string concatenation.
        if token:
            response += token
            yield response
# Site-wide Gradio theme. BUG FIX: the original had a redundant
# `theme = theme = ...` double assignment.
theme = gr.themes.Soft(
    primary_hue="orange",
    secondary_hue="orange",
    neutral_hue="emerald",
)
# Markdown copy rendered at the top of the page; explicit "\n" pieces
# reproduce the original triple-quoted literal byte-for-byte.
welcome_message = (
    "\n"
    "# Welcome to ScoobyAI\n"
    "## We understand that it is difficult to take care of pets, especially after a long day of school or work. That's why we created Scooby, your chatbot assistant for all pet health needs!\n"
)

# Markdown bullet list of the topics Scooby handles.
topics = (
    "\n"
    "### Scooby is happy to help you over these topics!\n"
    "- Explaining pet issues\n"
    "- Creating feeding schedules\n"
    "- Creating recipes for pets based on dietary needs\n"
)
# Page layout: hero image, welcome copy, topic list, then two tabs —
# a one-shot Q&A form and a streaming recipe chat.
with gr.Blocks(theme=theme) as demo:
    gr.Image(value="scooby.png", height=450, width=1536, show_label=False,
             show_share_button=False, show_download_button=False, container=False)
    gr.Markdown(welcome_message)
    with gr.Row():
        with gr.Column():
            gr.Markdown(topics)
    with gr.Tabs():
        with gr.TabItem("Main Page"):
            with gr.Row():
                text_input = gr.Textbox(label="Ask Scooby a question about your pet")
                output = gr.Textbox(label="Scooby's Answer")
            submit_btn = gr.Button("Ask Scooby")

            def ask_scooby(message):
                # Adapter for this tab: respond() takes (message, history)
                # but the button supplies only the message. BUG FIX: the
                # original wired respond directly to a single input, which
                # fails with a missing-argument error on click.
                yield from respond(message, [])

            submit_btn.click(fn=ask_scooby, inputs=text_input, outputs=output)
        with gr.TabItem("🍲 Recipes"):
            gr.Markdown("Ask Scooby about recipes here!")
            gr.ChatInterface(
                fn=respond,
                title="Pet Recipe Assistant 🍽️",
                textbox=gr.Textbox(placeholder="What does your pet need a recipe for?",
                                   label="Recipe Request"),
            )

demo.launch(debug=True)