Spaces:
Runtime error
Runtime error
File size: 3,160 Bytes
210592a 049f21c 194730e 210592a f9d2275 7759e89 a811070 057f7cb f9d2275 ab01886 51ead22 ab01886 ee2c029 ab01886 8444bba 6be7fac ab01886 f9d2275 b00fdfe f9d2275 dcf2468 55b55ad d592c9f 55b55ad df8837b 55b55ad 39c35e8 49f302c f891346 dcf2468 f891346 dcf2468 39c35e8 23c550c 55b55ad b00fdfe 23c550c ae5eea4 23c550c b00fdfe 23c550c |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 |
from huggingface_hub import InferenceClient
import gradio as gr
import random
client = InferenceClient("google/gemma-3-27b-it")
#this is where you change the LLM
def respond(message, history):
    """Stream a chat reply for *message*, yielding the accumulated partial text.

    Args:
        message: the user's latest input string.
        history: prior chat turns as a list of {"role", "content"} dicts
            (may be None or empty).

    Yields:
        The response text so far, growing token by token.
    """
    # Routing request for the OBGYN tab. BUG FIX: the original used
    # `return text, history` here, but this function is a generator (it
    # contains `yield`), so a `return value` is discarded by callers that
    # iterate it and the message was never shown. Yield it instead.
    if "nearest obgyn" in message.lower():
        yield "Sure! Please click on the 'OBGYN Finder' tab above to get started. 🩺"
        return

    # This is where you change the LLM persona.
    messages = [{"role": "system", "content": "You are a mean chatbot."}]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = ""
    # Distinct loop variable: the original reused `message`, shadowing the
    # function parameter mid-stream.
    for chunk in client.chat_completion(
        messages,
        max_tokens=50,  # limits reply length
        stream=True,
        # temperature=.9,
        # top_p=.7,
    ):
        token = chunk.choices[0].delta.content
        # Streaming deltas may carry content=None (e.g. role-only frames);
        # guard so concatenation never raises TypeError.
        response += token or ""
        yield response
def echo(message, history):
    """Ignore the input entirely and reply with one random canned line.

    Args:
        message: user input (unused).
        history: chat history (unused).

    Returns:
        A randomly chosen snarky string.
    """
    # Edit this tuple to change the bot's personality.
    canned_replies = ("so true", "shut up", "you need help", "ts pmo sybau")
    return random.choice(canned_replies)
def show_info(topic):
    """Look up a short educational blurb for *topic*.

    Args:
        topic: one of the known topic names ("Puberty", "Periods",
            "Hormones", "Self-care").

    Returns:
        The blurb for a known topic, otherwise a prompt to pick one.
    """
    fallback = "Select a topic to see more info."
    topic_blurbs = {
        "Puberty": "Puberty is when your body begins to change into an adult body.",
        "Periods": "A period is a natural process where the body sheds the uterine lining.",
        "Hormones": "Hormones are chemicals that regulate body functions, mood, and growth.",
        "Self-care": "Self-care means taking time to rest, eat well, and manage stress.",
    }
    return topic_blurbs.get(topic, fallback)
with gr.Blocks() as demo:
    gr.Markdown("## 💬 Mean Chatbot")
    user_input = gr.Textbox(label="Your Question", placeholder="Type something...", lines=2)
    output = gr.Textbox(label="Bot's Answer", interactive=False)
    state = gr.State([])

    # BUG FIX: the original had a bare, incomplete `def show_info` line
    # here, which is a SyntaxError; it has been removed (show_info is
    # already defined at module level).

    def handle_submit(message, history):
        """Relay respond()'s streamed partials into the answer textbox."""
        response = ""
        for partial in respond(message, history):
            response = partial
            yield response

    user_input.submit(
        fn=handle_submit,
        inputs=[user_input, state],
        outputs=output,
    )

    with gr.Tab("Tab 1"):
        gr.Markdown("### This is Tab 1")
        gr.HTML("""
        <a href="https://drive.google.com/file/d/1_KNELAUDLLidwAT3fs2JBuO1yPgMGoDv/view" target="_blank">
        <button style="font-size:16px;padding:10px 20px;margin-top:10px;">
        📄 Open Puberty Guide PDF
        </button>
        </a>
        """)

    with gr.Tab("OBGYN Finder") as obgyn_tab:
        gr.Markdown("### Find a Nearby OBGYN")
        location_dropdown = gr.Dropdown(
            choices=["New York", "California", "Texas", "Florida"],
            label="Choose your state"
        )
        result_text = gr.Textbox(label="Nearby Clinics")
        # Show response when a choice is made.
        # BUG FIX: the original wired `dropdown.change(...)` -- `dropdown`
        # is never defined (NameError at build time) and the result went to
        # the chatbot's `output` box instead of this tab's textbox.
        # NOTE(review): show_info() only knows Puberty/Periods/Hormones/
        # Self-care, so state names hit its fallback message -- confirm
        # whether a real clinic-lookup handler was intended here.
        location_dropdown.change(fn=show_info, inputs=location_dropdown, outputs=result_text)

demo.launch(debug=True)
#added code for blocks
#chatbot = gr.ChatInterface(respond, type = "messages")
#chatbot.launch()
#chatbot.launch(debug=True)