# app.py — AI-powered Mechanical Engineering learning chatbot
# (Hugging Face Space by SivaMallikarjun; commit 5ec0f12 "Update app.py")
import gradio as gr
import os
import requests
# Hugging Face Inference API configuration.
# Read the API key from the environment — never hard-code secrets in source.
# (The previous literal "HF_API_KEY" was a placeholder and could never authenticate.)
HF_API_KEY = os.environ.get("HF_API_KEY", "")
API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct"
headers = {"Authorization": f"Bearer {HF_API_KEY}"}
def query_huggingface(payload):
    """POST *payload* to the Hugging Face Inference API and return the parsed JSON.

    The API reports failures as a JSON object with an "error" key; that object
    is returned as-is and callers are expected to check for it.
    """
    # timeout prevents the request from hanging indefinitely on a stalled server
    response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
    return response.json()
def ai_tutor(question):
    """Ask the Mistral-7B-Instruct model *question* and return its answer text.

    Returns a fixed apology string when the API reports an error or returns
    a payload in an unexpected shape.
    """
    payload = {
        "inputs": question,
        "parameters": {"max_new_tokens": 100, "temperature": 0.7},
    }
    result = query_huggingface(payload)
    # The API signals failure (model loading, rate limit, bad key) as a dict
    # with an "error" key; a successful call returns a list instead.
    if isinstance(result, dict) and "error" in result:
        return "Sorry, I couldn't process the request at the moment."
    # On success the API returns a list like [{"generated_text": "..."}];
    # guard against empty or malformed payloads instead of crashing the UI.
    try:
        return result[0]["generated_text"]
    except (IndexError, KeyError, TypeError):
        return "Sorry, I couldn't process the request at the moment."
# Gradio Chatbot UI: a chat transcript, a question box, and a submit button.
with gr.Blocks() as demo:
    gr.Markdown("## 🤖 AI-Powered Mechanical Engineering Learning Chatbot")
    chatbot = gr.Chatbot()
    input_text = gr.Textbox(label="Ask a Question")
    submit_btn = gr.Button("Submit")

    def respond(message, history):
        """Answer *message* via the model, append the pair to the chat history,
        and return the updated history plus "" to clear the input box."""
        response = ai_tutor(message)
        history.append((message, response))
        return history, ""

    # Clicking Submit sends (question, current history) in and writes
    # (updated history, cleared textbox) back out.
    submit_btn.click(respond, inputs=[input_text, chatbot], outputs=[chatbot, input_text])

demo.launch()