"""ApiTest-ChatBot — app.py

Gradio app exposing a fine-tuned GPT-2 model as an AI interview assistant:
one endpoint generates the next interview question from a Q&A history, the
other generates feedback for a completed session.
"""
import ast

import gradio as gr
from transformers import GPT2LMHeadModel, GPT2Tokenizer
# Hugging Face model repository name.
# NOTE(review): a Hub repo id normally includes a namespace
# ("user/fine_tuned_gpt2"); a bare name like this resolves only if a local
# directory of that name exists next to the app — confirm before deploying.
MODEL_NAME = "fine_tuned_gpt2"
# Load the model and tokenizer from Hugging Face
def load_model():
    """Load the fine-tuned GPT-2 model and tokenizer identified by MODEL_NAME.

    Returns:
        tuple: ``(model, tokenizer)`` ready for text generation.
    """
    print("Loading model from Hugging Face...")
    gpt2_tokenizer = GPT2Tokenizer.from_pretrained(MODEL_NAME)
    gpt2_model = GPT2LMHeadModel.from_pretrained(MODEL_NAME)
    print("Model loaded successfully.")
    return gpt2_model, gpt2_tokenizer
# Initialize model and tokenizer once at import time so both Gradio handlers
# share the same loaded instances.
model, tokenizer = load_model()
# Functions for question generation and feedback
def generate_question(response, history):
    """
    Generate the next interview question from the user's response and prior Q&A history.

    Args:
        response: The user's latest answer text.
        history: List of dicts, each with "question" and "answer" keys.

    Returns:
        str: The newly generated question text (prompt not echoed back).
    """
    # Build the prompt from the running Q&A transcript.
    parts = [f"Question: {qa['question']}\nAnswer: {qa['answer']}\n" for qa in history]
    parts.append(f"Response: {response}\nNext Question:")
    input_text = "".join(parts)

    inputs = tokenizer.encode(input_text, return_tensors="pt", truncation=True, max_length=512)
    # Use max_new_tokens, not max_length: the original max_length=200 counted
    # prompt + continuation, so any prompt longer than 200 tokens (the encoder
    # allows up to 512) would fail or produce nothing at generate time.
    outputs = model.generate(
        inputs,
        max_new_tokens=100,
        num_return_sequences=1,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Decode only the continuation; the original decoded outputs[0] in full,
    # which echoed the entire transcript back to the caller.
    generated = outputs[0][inputs.shape[1]:]
    return tokenizer.decode(generated, skip_special_tokens=True).strip()
def get_feedback(session_data):
    """
    Generate feedback text for a completed interview session.

    Args:
        session_data: List of dicts, each with "question" and "answer" keys.

    Returns:
        str: The model's generated feedback (prompt not echoed back).
    """
    # Build the scoring prompt from the full transcript.
    lines = ["Session Feedback:\n"]
    for qa in session_data:
        lines.append(f"Question: {qa['question']}\nAnswer: {qa['answer']}\n")
    lines.append("\nScore:")
    input_text = "".join(lines)

    inputs = tokenizer.encode(input_text, return_tensors="pt", truncation=True, max_length=512)
    # max_new_tokens instead of max_length: the original max_length=500 counted
    # prompt + continuation, so a near-512-token prompt would raise at generate time.
    outputs = model.generate(
        inputs,
        max_new_tokens=200,
        num_return_sequences=1,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Decode only the newly generated tokens; the original echoed the whole
    # prompt back as part of the feedback.
    feedback = tokenizer.decode(outputs[0][inputs.shape[1]:], skip_special_tokens=True)
    return feedback.strip()
# Gradio Interface Functions
def question_interface(response, history):
    """
    Gradio wrapper for question generation.

    Args:
        response: The user's latest answer text.
        history: String containing a Python-literal list of
            {"question": ..., "answer": ...} dicts.

    Returns:
        str: The generated question, or an "Error: ..." message on failure.
    """
    try:
        # ast.literal_eval only parses Python literals — unlike eval(), it
        # cannot execute arbitrary code supplied through the public UI/API.
        history = ast.literal_eval(history)
        return generate_question(response, history)
    except Exception as e:
        return f"Error: {str(e)}"
def feedback_interface(session_data):
    """
    Gradio wrapper for feedback generation.

    Args:
        session_data: String containing a Python-literal list of
            {"question": ..., "answer": ...} dicts.

    Returns:
        str: The generated feedback, or an "Error: ..." message on failure.
    """
    try:
        # ast.literal_eval only parses Python literals — unlike eval(), it
        # cannot execute arbitrary code supplied through the public UI/API.
        session_data = ast.literal_eval(session_data)
        return get_feedback(session_data)
    except Exception as e:
        return f"Error: {str(e)}"
# Gradio Interface Setup
# Tab 1: user response + serialized Q&A history in, next interview question out.
question_iface = gr.Interface(
    fn=question_interface,
    inputs=[
        gr.Textbox(label="User Response"),
        gr.Textbox(label="History (list of Q&A in string format)"),
    ],
    outputs="text",
    title="AI Interview Assistant: Question Generator",
    description="Provide a response and Q&A history to generate the next interview question.",
    api_name="generate_question"  # stable endpoint name for API clients
)
# Tab 2: serialized session data in, model-generated feedback out.
feedback_iface = gr.Interface(
    fn=feedback_interface,
    inputs=gr.Textbox(label="Session Data (list of Q&A in string format)"),
    outputs="text",
    title="AI Interview Assistant: Feedback Generator",
    description="Provide session data to get feedback on your responses.",
    api_name="get_feedback"  # stable endpoint name for API clients
)
# Combine the Interfaces into a single tabbed app.
iface = gr.TabbedInterface(
    interface_list=[question_iface, feedback_iface],
    tab_names=["Generate Question", "Get Feedback"]
)
# Launch the Gradio App
if __name__ == "__main__":
    # 0.0.0.0:7860 is the conventional binding for a Hugging Face Space container.
    iface.launch(server_name="0.0.0.0", server_port=7860)