# AYS / app.py
# Author: Geethika Isuru Sampath
# NewWithAPI — 5777026
import gradio as gr
import requests
import os
from dotenv import load_dotenv
# Load environment variables from .env file
load_dotenv()
# Hosted Inference API endpoint for the extractive QA model deepset/roberta-base-squad2
API_URL = "https://api-inference.huggingface.co/models/deepset/roberta-base-squad2"
# NOTE(review): os.environ['...'] raises KeyError at import time if
# HUGGINGFACE_API_KEY is unset — fail-fast, but the error is opaque; confirm intended.
headers = {"Authorization": f"Bearer {os.environ['HUGGINGFACE_API_KEY']}"}
# Initialize an empty list to store conversation history
# (module-level, so it persists across calls — shared by every user of the app)
conversation_history = []
def query(payload):
    """POST *payload* to the Hugging Face Inference API and return the parsed JSON.

    Parameters
    ----------
    payload : dict
        Request body, e.g. ``{"inputs": {"question": ..., "context": ...}}``.

    Returns
    -------
    dict
        Decoded JSON response — either an answer payload or an API error object.
    """
    # timeout= keeps the UI from hanging forever if the inference API stalls
    response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
    return response.json()
def answer_question(question, context):
    """Answer *question* from *context* via the Inference API; return the transcript.

    Appends the new Q&A pair to the module-level ``conversation_history`` and
    returns the entire history joined with newlines, so the Gradio output
    textbox always shows the whole conversation.

    Parameters
    ----------
    question : str
        The user's question.
    context : str
        Passage the model extracts the answer from.

    Returns
    -------
    str
        Full conversation history, one "Human:"/"AI:" line per turn.
    """
    # Use the Hugging Face Inference API to get the answer
    output = query({
        "inputs": {
            "question": question,
            "context": context,
        },
    })
    # A successful QA response is a dict with an 'answer' key; error payloads
    # (rate limit, model loading) may be a dict without it — or not a dict at
    # all — so guard before .get() instead of crashing with AttributeError.
    if isinstance(output, dict):
        answer = output.get('answer', 'Sorry, I couldn\'t find an answer.')
    else:
        answer = 'Sorry, I couldn\'t find an answer.'
    # Add the Q&A to the conversation history
    conversation_history.append(f"Human: {question}")
    conversation_history.append(f"AI: {answer}")
    # Return the full conversation history as a string
    return "\n".join(conversation_history)
# Build the Gradio UI: two text inputs (question + context) feed
# answer_question, whose return value (the full transcript) fills the
# output textbox.
iface = gr.Interface(
    fn=answer_question,
    inputs=[
        gr.Textbox(label="Question", placeholder="Ask your question here..."),
        gr.Textbox(label="Context", placeholder="Provide context here...", lines=5),
    ],
    outputs=gr.Textbox(label="Conversation", lines=10),
    title="IEEE USJ Student Branch Copilot",
    description="Ask questions based on the provided context.",
    theme="default",
)

# Launch the app (starts a local web server; blocks until shut down)
iface.launch()