import os
import json
import logging
from typing import Optional
import gradio as gr
from utils.response_manager import ResponseManager
class ChatbotInterface:
    """Gradio-based chat GUI wired to a ResponseManager backend.

    Loads GUI labels from a JSON config file and exposes a single
    :meth:`create_interface` factory that builds the Gradio Blocks app.
    """

    def __init__(self,
                 config_path: str = 'config/gradio_config.json',
                 model: str = "gpt-4o-mini",
                 temperature: float = 0,
                 max_output_tokens: int = 800,
                 max_num_results: int = 15,
                 vector_store_id: Optional[str] = None,
                 api_key: Optional[str] = None,
                 meta_prompt_file: Optional[str] = None):
        """
        Initialize the ChatbotInterface with configuration and custom parameters
        for the underlying ResponseManager.

        :param config_path: Path to the JSON file holding the GUI labels.
        :param model: Model name forwarded to ResponseManager.
        :param temperature: Sampling temperature forwarded to ResponseManager.
        :param max_output_tokens: Response token cap forwarded to ResponseManager.
        :param max_num_results: Retrieval result cap forwarded to ResponseManager.
        :param vector_store_id: Optional vector store id forwarded to ResponseManager.
        :param api_key: Optional API key forwarded to ResponseManager.
        :param meta_prompt_file: Optional meta-prompt file path forwarded to ResponseManager.
        :raises FileNotFoundError: If the configuration file does not exist.
        :raises ValueError: If a required configuration key is missing.
        """
        self.config = self.load_config(config_path)
        self.title = self.config["chatbot_title"]
        self.description = self.config["chatbot_description"]
        # Deliberately hard-coded (not read from config) for a unified look.
        self.input_placeholder = "How can the CIS agent help you?"
        self.output_label = self.config["chatbot_output_label"]
        self.submit_button = self.config["chatbot_submit_button"]
        try:
            self.response_manager = ResponseManager(
                model=model,
                temperature=temperature,
                max_output_tokens=max_output_tokens,
                max_num_results=max_num_results,
                vector_store_id=vector_store_id,
                api_key=api_key,
                meta_prompt_file=meta_prompt_file
            )
            # Lazy %-style args so formatting is skipped when INFO is disabled.
            logging.info(
                "ChatbotInterface initialized with the following parameters:\n"
                " - Model: %s\n"
                " - Temperature: %s\n"
                " - Max Output Tokens: %s\n"
                " - Max Number of Results: %s\n",
                model, temperature, max_output_tokens, max_num_results
            )
        except Exception as e:
            logging.error("Failed to initialize ResponseManager: %s", e)
            raise

    @staticmethod
    def load_config(config_path: str) -> dict:
        """
        Load and validate the Gradio GUI configuration from a JSON file.

        :param config_path: Path to the JSON configuration file.
        :return: The parsed configuration dictionary.
        :raises FileNotFoundError: If the file does not exist.
        :raises ValueError: If any required configuration key is missing.
        """
        logging.info("Loading configuration from %s...", config_path)
        if not os.path.exists(config_path):
            logging.error("Configuration file not found: %s", config_path)
            raise FileNotFoundError(f"Configuration file not found: {config_path}")
        # Explicit encoding so parsing does not depend on the platform default.
        with open(config_path, 'r', encoding='utf-8') as config_file:
            config = json.load(config_file)
        required_keys = [
            "chatbot_title",
            "chatbot_description",
            "chatbot_input_label",
            "chatbot_input_placeholder",
            "chatbot_output_label",
            "chatbot_reset_button",
            "chatbot_submit_button"
        ]
        for key in required_keys:
            if key not in config:
                logging.error("Missing required configuration key: %s", key)
                raise ValueError(f"Missing required configuration key: {key}")
        logging.info("Configuration loaded successfully.")
        return config

    def create_interface(self) -> "gr.Blocks":
        """
        Create the Gradio Blocks interface that displays a single container
        including both the text input and a small arrow submit button. The
        ChatInterface clears the text input after each message is submitted.

        :return: The assembled (but not yet launched) Gradio Blocks app.
        """
        logging.info("Creating Gradio interface...")
        with gr.Blocks() as demo:
            # Title and description area.
            gr.Markdown(f"## {self.title}\n{self.description}")
            # Chatbot output area.
            chatbot_output = gr.Chatbot(label=self.output_label, type="messages")

            def clear_history() -> list:
                """Return an empty list to reset the chat history."""
                return []

            def process_input(user_message, chat_history):
                """
                Forward the user's message and chat history to the
                ResponseManager and return the updated chat history.
                """
                return self.response_manager.generate_response(user_message, chat_history)

            gr.ChatInterface(
                process_input,
                chatbot=chatbot_output,
                type="messages"
            )
            # Wire the chatbot's clear control to reset the displayed history.
            chatbot_output.clear(clear_history, outputs=[chatbot_output])
        logging.info("Gradio interface created successfully.")
        return demo