| | import os |
| | import json |
| | import logging |
| | from typing import Optional |
| | import gradio as gr |
| | from utils.response_manager import ResponseManager |
| |
|
class ChatbotInterface:
    """Gradio-based chat GUI wired to a ResponseManager backend.

    Loads UI labels/text from a JSON configuration file, instantiates a
    ResponseManager with the supplied model parameters, and builds a
    ``gr.Blocks`` interface whose messages are answered by
    ``ResponseManager.generate_response``.
    """

    def __init__(self,
                 config_path: str = 'config/gradio_config.json',
                 model: str = "gpt-4o-mini",
                 temperature: float = 0,
                 max_output_tokens: int = 800,
                 max_num_results: int = 15,
                 vector_store_id: Optional[str] = None,
                 api_key: Optional[str] = None,
                 meta_prompt_file: Optional[str] = None):
        """
        Initialize the ChatbotInterface with configuration and custom parameters for ResponseManager.

        :param config_path: Path to the Gradio GUI JSON configuration file.
        :param model: Model name forwarded to ResponseManager.
        :param temperature: Sampling temperature forwarded to ResponseManager.
        :param max_output_tokens: Output-token cap forwarded to ResponseManager.
        :param max_num_results: Retrieval-result cap forwarded to ResponseManager.
        :param vector_store_id: Optional vector store id forwarded to ResponseManager.
        :param api_key: Optional API key forwarded to ResponseManager.
        :param meta_prompt_file: Optional meta-prompt file path forwarded to ResponseManager.
        :raises FileNotFoundError: If the configuration file does not exist.
        :raises ValueError: If a required configuration key is missing.
        """
        self.config = self.load_config(config_path)

        self.title = self.config["chatbot_title"]
        self.description = self.config["chatbot_description"]
        # Fix: this was hard-coded to "How can the CIS agent help you?"
        # even though "chatbot_input_placeholder" is a required, validated
        # configuration key — use the configured value instead.
        self.input_placeholder = self.config["chatbot_input_placeholder"]
        self.output_label = self.config["chatbot_output_label"]
        self.submit_button = self.config["chatbot_submit_button"]

        try:
            self.response_manager = ResponseManager(
                model=model,
                temperature=temperature,
                max_output_tokens=max_output_tokens,
                max_num_results=max_num_results,
                vector_store_id=vector_store_id,
                api_key=api_key,
                meta_prompt_file=meta_prompt_file
            )
            # Lazy %-style args: the message is only formatted if INFO is enabled.
            logging.info(
                "ChatbotInterface initialized with the following parameters:\n"
                "  - Model: %s\n"
                "  - Temperature: %s\n"
                "  - Max Output Tokens: %s\n"
                "  - Max Number of Results: %s\n",
                model, temperature, max_output_tokens, max_num_results,
            )
        except Exception as e:
            logging.error("Failed to initialize ResponseManager: %s", e)
            raise

    @staticmethod
    def load_config(config_path: str) -> dict:
        """
        Load the configuration for Gradio GUI interface from the JSON file.

        :param config_path: Path to the JSON configuration file.
        :return: The parsed configuration dictionary.
        :raises FileNotFoundError: If ``config_path`` does not exist.
        :raises ValueError: If any required configuration key is missing.
        """
        logging.info("Loading configuration from %s...", config_path)
        if not os.path.exists(config_path):
            logging.error("Configuration file not found: %s", config_path)
            raise FileNotFoundError(f"Configuration file not found: {config_path}")

        # Explicit encoding so the JSON config is decoded the same way on
        # every platform (no dependence on the locale default).
        with open(config_path, 'r', encoding='utf-8') as config_file:
            config = json.load(config_file)

        required_keys = [
            "chatbot_title",
            "chatbot_description",
            "chatbot_input_label",
            "chatbot_input_placeholder",
            "chatbot_output_label",
            "chatbot_reset_button",
            "chatbot_submit_button"
        ]
        for key in required_keys:
            if key not in config:
                logging.error("Missing required configuration key: %s", key)
                raise ValueError(f"Missing required configuration key: {key}")

        logging.info("Configuration loaded successfully.")
        return config

    def create_interface(self) -> "gr.Blocks":
        """
        Create the Gradio Blocks interface that displays a single container including both
        the text input and a small arrow submit button. The interface will clear the text input
        after each message is submitted.

        :return: The assembled (not yet launched) ``gr.Blocks`` demo.
        """
        logging.info("Creating Gradio interface...")

        with gr.Blocks() as demo:
            gr.Markdown(f"## {self.title}\n{self.description}")

            chatbot_output = gr.Chatbot(label=self.output_label, type="messages")

            def clear_history() -> list:
                """Return an empty list so Gradio resets the chat history."""
                return []

            def process_input(user_message, chat_history):
                """
                Call generate_response with the user's message and chat history
                and return the updated chat history (ChatInterface clears the
                input box itself).
                """
                return self.response_manager.generate_response(user_message, chat_history)

            # ChatInterface wires the textbox, submit button, and post-submit
            # clearing itself; the return value is not needed afterwards
            # (the previous unused `user_input` local was removed).
            gr.ChatInterface(
                process_input,
                chatbot=chatbot_output,
                type="messages"
            )

            # Reset the displayed history when the chatbot's clear control fires.
            chatbot_output.clear(clear_history, outputs=[chatbot_output])

        logging.info("Gradio interface created successfully.")
        return demo