Spaces:
Sleeping
Sleeping
| from fastapi import FastAPI , UploadFile , File , Form | |
| from fastapi.responses import JSONResponse | |
| from pydantic import BaseModel | |
| from brainstroming_agent.agent import brainstroming_graph | |
| import pandas as pd | |
| from typing import Optional , List | |
| from context_analysis_agent.agent import IntroductionChatbot | |
| from business_interaction_agent.agent import BusinessInteractionChatbot | |
| from context_analysis_agent.utils.utils import save_to_db | |
| import ast | |
| from brainstroming_agent.utils.utils import encode_image_to_base64 , generate_final_story, generate_image | |
| import json | |
# Module-level application state.
# NOTE(review): all state below is process-global and unsynchronized — it is
# shared across every request/session; confirm single-worker deployment.
app = FastAPI()
# Singleton agent/chatbot instances reused across requests.
context_analysis_graph = IntroductionChatbot()
business_interaction_graph = BusinessInteractionChatbot()
brainstrom_graph = brainstroming_graph()
# In-memory store for business details, brainstorming output, story and image.
stored_data={}
# Hard-coded default business profile, overwritten once context analysis completes.
stored_data['business_details']={"business_type": "restaurant", "platform": "instagram", "target_audience": "youths", "business_goals": "to go global", "offerings": "nepali foods", "Challenges_faced": "finding new customers, attracting large customers"}
class UserMessage(BaseModel):
    """Request payload carrying a single user chat message."""
    message: str
def context_analysis(msg: UserMessage):
    """Run one turn of the context-analysis chat.

    Sends the user's message to the context-analysis chatbot. While the
    conversation is still gathering information, returns only the bot reply.
    Once the bot signals completion, extracts the business details, persists
    them, caches them in ``stored_data``, and returns them to the caller.

    Args:
        msg: The incoming user chat message.

    Returns:
        ``{"response": str, "complete": False}`` mid-conversation, or
        ``{"response": str, "business_details": dict, "complete": True}``
        once extraction has finished.
    """
    response = context_analysis_graph.chat(msg.message)
    if not context_analysis_graph.is_complete(response):
        return {"response": response, "complete": False}

    details = context_analysis_graph.extract_details()
    # Normalize the extracted details to a plain dict.
    # BUG FIX: the original tested ``type(details) != dict`` first, so a
    # string return crashed on ``details.model_dump()`` before the string
    # branch could run. Check for str BEFORE falling back to model_dump().
    if isinstance(details, str):
        # extract_details may return a dict-literal string; parse it safely
        # (literal_eval only accepts Python literals, never executes code).
        details = ast.literal_eval(details)
    elif not isinstance(details, dict):
        # Assume a pydantic model — convert to a plain dict.
        details = details.model_dump()

    save_to_db(details)
    stored_data['business_details'] = details
    return {"response": response, "business_details": details, "complete": True}
def business_interaction(interaction: str):
    """Forward one interaction turn to the business-interaction chatbot.

    The chatbot receives the current business details alongside the user's
    input and may return an updated copy; the updated details replace the
    cached ones before the reply is returned.
    """
    reply, updated_details = business_interaction_graph.chat(
        interaction, stored_data['business_details']
    )
    stored_data['business_details'] = updated_details
    return {'response': reply}
def brainstroming_endpoint(
    query: List[str],  # sent as JSON body
    preferred_topics: Optional[list] = None,
    images: Optional[List[UploadFile]] = None,
    thread_id: Optional[str] = "default-session",
):
    """Run the brainstorming graph for a query and optional reference images.

    Args:
        query: Topic strings to brainstorm around.
        preferred_topics: Topics the user has already marked as preferred
            (defaults to none).
        images: Optional uploaded images, converted to base64 before being
            passed to the graph.
        thread_id: LangGraph checkpoint/session identifier.

    Returns:
        ``{"response": <graph result>}``; the result is also cached in
        ``stored_data['brainstroming_response']`` for the story endpoint.
    """
    # BUG FIX: the defaults were mutable lists ([]), which are shared across
    # all calls in Python. Use None as the sentinel and build fresh lists.
    preferred_topics = [] if preferred_topics is None else preferred_topics
    images = [] if images is None else images

    # Convert uploaded images to base64 for the graph's multimodal input.
    image_base64_list = [encode_image_to_base64(img) for img in images]

    # Invoke LangGraph. (The original wrapped a dict lookup in an immediately
    # invoked lambda; dict.get expresses the same fallback directly.)
    result = brainstrom_graph.invoke(
        {
            'topic': query,
            'images': image_base64_list,
            'latest_preferred_topics': preferred_topics,
            'business_details': stored_data.get('business_details', {}),
        },
        config={"configurable": {"thread_id": thread_id}},
    )
    stored_data['brainstroming_response'] = result
    return {
        'response': result,
    }
def generate_final_story_endpoint():
    """Build the final story from the cached brainstorming result.

    Requires a prior call to the brainstorming endpoint (raises KeyError if
    ``stored_data['brainstroming_response']`` is absent). The story is cached
    in ``stored_data['final_story']`` for the image endpoint.
    """
    story = generate_final_story(stored_data["brainstroming_response"])
    stored_data['final_story'] = story
    return {'response': story}
def generate_image_endpoint():
    """Generate an image from the cached final story.

    Requires a prior call to the story endpoint (raises KeyError if
    ``stored_data['final_story']`` is absent). The image is cached in
    ``stored_data['generated_image']``.
    """
    generated = generate_image(str(stored_data['final_story']))
    stored_data['generated_image'] = generated
    return {'response': generated}