import ast
import os
import json
import hashlib
import base64
import logging
from fastapi import APIRouter
from typing import Optional
from redis import Redis
from fastapi.responses import StreamingResponse
from api.stored_data import stored_data
from src.genai.analytics_chatbot.agent import ChatbotAgent
from src.genai.analytics_chatbot.utils.utils import process_query
from src.genai.analytics_chatbot.handlers.analytics_description import generate_analytics_description
from api.schemas.analytics_chatbot import AnalyticsChatRequest
from config.redis_config import redis_client
from openai import OpenAI

app_logger = logging.getLogger("app_logger")
error_logger = logging.getLogger("error_logger")

router = APIRouter()

# Module-level singletons: the chatbot graph is compiled once at import time
# and reused across requests.
agent = ChatbotAgent()
graph = agent.chatbot_graph()

# Cache TTL for analytics responses, in seconds.
CACHE_TTL_SECONDS = 3000


@router.post("/analytics-chatbot")
def get_analytics(request: AnalyticsChatRequest) -> dict:
    """Answer an analytics chat query, with a Redis response cache.

    Flow:
      1. Normalize the raw message via ``process_query``.
      2. Look up a cached response keyed by the MD5 of the processed query
         (MD5 is used only as a cache-key digest, not for security).
      3. If no complete cached answer exists, invoke the chatbot graph.
      4. Optionally enrich the response with an image description when the
         request carries a base64 image and none is cached yet.
      5. Write the (possibly updated) payload back to Redis and return it.

    Args:
        request: Validated body containing ``msg`` and optional
            ``image_base64``.

    Returns:
        A dict with some subset of the keys ``response``, ``endpoint``,
        ``backup_response`` and ``description``.
    """
    user_query = process_query(request.msg)
    # Use the app logger instead of print() so output goes through the
    # configured logging pipeline (lazy %-formatting, no f-string cost).
    app_logger.info("Processed user query: %s", user_query)

    cache_key = f"analytics:{hashlib.md5(user_query.encode()).hexdigest()}"
    cached_response = redis_client.get(cache_key)
    app_logger.info("cached-response: %s", cached_response)

    response_to_cache: dict = {}
    if cached_response:
        try:
            response_to_cache = json.loads(cached_response)
        except (ValueError, TypeError):
            # A corrupted or legacy cache entry must degrade to a cache
            # miss, not a 500; log it and recompute below.
            error_logger.exception("Invalid JSON in cache for key %s", cache_key)
            response_to_cache = {}

    # Recompute when the cached payload is missing either of the fields a
    # complete answer carries. NOTE(review): a cached entry holding only
    # 'backup_response' never satisfies this check, so the graph is
    # re-invoked on every request for such queries — confirm intentional.
    if not response_to_cache.get('response') or not response_to_cache.get('endpoint'):
        config = {
            "configurable": {"thread_id": "analytics-chatbot-thread"},
            "run_name": "analytics-chatbot",
        }
        result = graph.invoke({'messages': user_query}, config=config)
        if result.get('backup_data') is not None:
            response_to_cache['backup_response'] = result['backup_data']
        else:
            response_to_cache['response'] = result['response']
            response_to_cache['endpoint'] = result['endpoint']

    # Generate an image description once per cached entry.
    if request.image_base64 and not response_to_cache.get('description'):
        description = generate_analytics_description(user_query, request.image_base64)
        if description is not None:
            response_to_cache['description'] = description

    # Unconditional write also refreshes the TTL on pure cache hits.
    redis_client.set(cache_key, json.dumps(response_to_cache), ex=CACHE_TTL_SECONDS)
    return response_to_cache