trygithubactions / main.py
subashpoudel's picture
Refined the files
eb40d68
raw
history blame
3.88 kB
from fastapi import FastAPI , UploadFile , File , Form
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from brainstroming_agent.agent import brainstroming_graph
import pandas as pd
from typing import Optional , List
from context_analysis_agent.agent import IntroductionChatbot
from business_interaction_agent.agent import BusinessInteractionChatbot
from context_analysis_agent.utils.utils import save_to_db
import ast
from brainstroming_agent.utils.utils import encode_image_to_base64 , generate_final_story, generate_image
from fastapi import Body, Query
from ideation_agent.agent import ideation_graph
import json
# Application wiring: one FastAPI app plus one pre-built agent/graph per
# capability. All endpoints share state through the module-level
# `stored_data` dict, keyed by stage name ('business_details',
# 'brainstroming_response', 'final_story', 'generated_image').
# NOTE(review): this is per-process, single-tenant state — concurrent users
# would overwrite each other; the `thread_id` on /brainstrom only scopes the
# graph checkpointer, not this dict. Confirm this is the intended deployment.
app = FastAPI()
context_analysis_graph = IntroductionChatbot()
business_interaction_graph = BusinessInteractionChatbot()
idea_graph = ideation_graph()
brainstrom_graph = brainstroming_graph()
stored_data={}
# Seed business details so downstream endpoints work without running
# /context-analysis first — presumably dev/demo data; TODO confirm whether
# this should ship enabled (the empty-dict alternative is kept below).
stored_data['business_details']={"business_type": "cosmetic", "platform": "instagram", "target_audience": "youths", "business_goals": "to go global", "offerings": "nepali skin care products", "Challenges_faced": "finding new customers, attracting large customers"}
# stored_data['business_details']={}
class UserMessage(BaseModel):
    """Request body for /context-analysis: a single user chat turn."""
    message: str
@app.post("/context-analysis")
def context_analysis(msg: UserMessage):
    """Run one chat turn of the context-analysis agent.

    Returns ``{"response", "complete"}`` for an in-progress conversation.
    Once the agent signals completion, the extracted business details are
    normalized to a plain dict, persisted via ``save_to_db``, cached in
    ``stored_data``, and included in the response.
    """
    response = context_analysis_graph.chat(msg.message)
    if context_analysis_graph.is_complete(response):
        details = context_analysis_graph.extract_details()
        # extract_details() may return a stringified dict, a pydantic model,
        # or a plain dict — normalize to dict.
        # BUG FIX: the old code ran `.model_dump()` on any non-dict first,
        # which raised AttributeError for plain strings before the
        # literal_eval branch could ever run. Handle the string case first.
        if isinstance(details, str):
            # literal_eval (not eval) — only parses Python literals, safe
            # for model-produced text.
            details = ast.literal_eval(details)
        elif not isinstance(details, dict):
            details = details.model_dump()
        print('Business_details:', details)
        print('Details Type:', type(details))
        save_to_db(details)
        stored_data['business_details'] = details
        return {"response": response, "business_details": details, "complete": True}
    return {"response": response, "complete": False}
@app.post("/business-interaction")
def business_interaction(interaction: str):
    """Forward one interaction to the business agent.

    The agent receives the cached business details alongside the user's
    message and may return an updated version of them, which replaces the
    cached copy before the reply is returned.
    """
    reply, refreshed_details = business_interaction_graph.chat(
        interaction, stored_data['business_details']
    )
    stored_data['business_details'] = refreshed_details
    return {'response': reply}
class IdeationRequest(BaseModel):
    """Request body for /ideation."""
    # Topics to ideate on.
    topic : List[str]
    # Business context supplied by the caller (required by the schema).
    business_details: dict
@app.post("/ideation")
def ideation_endpoint(request: IdeationRequest):
    """Generate ideas for the requested topics.

    BUG FIX: the required ``business_details`` field on the request was
    silently ignored — the endpoint always read the module-level cache.
    The caller's value is now used, falling back to the cached details
    only when the caller sends an empty dict (preserving the old behavior
    for that case).
    """
    result = idea_graph.invoke({
        'topic': request.topic,
        'business_details': request.business_details or stored_data['business_details'],
    })
    return {'response': result}
class BrainstormRequest(BaseModel):
    """Request body for /brainstrom.

    FIX: the list fields were declared as bare ``Optional[List]`` with no
    element type, so pydantic accepted payloads of arbitrary item types
    even though downstream code treats them as lists of strings. The
    element type is now explicit. (Mutable ``[]`` defaults are safe here:
    pydantic deep-copies field defaults per instance.)
    """
    # One or more brainstorming prompts/topics.
    query: List[str]
    # Topics the user has already expressed a preference for.
    preferred_topics: Optional[List[str]] = []
    # Reference images, base64-encoded.
    image_base64_list: Optional[List[str]] = []
    # Scopes the graph checkpointer so separate sessions don't share state.
    thread_id: Optional[str] = "default-session"
@app.post("/brainstrom")
def brainstroming_endpoint(request: BrainstormRequest):
    """Run the brainstorming graph for this request's thread.

    The result is cached in ``stored_data['brainstroming_response']`` so
    /generate-final-story can pick it up later.
    """
    result = brainstrom_graph.invoke(
        {
            'topic': request.query,
            'images': request.image_base64_list,
            'latest_preferred_topics': request.preferred_topics,
            # FIX: replaced the needlessly obscure immediately-invoked
            # lambda with the equivalent dict.get().
            'business_details': stored_data.get('business_details', {}),
        },
        # thread_id scopes the graph's checkpointer to this session.
        config={"configurable": {"thread_id": request.thread_id}},
    )
    stored_data['brainstroming_response'] = result
    return {'response': result}
@app.post("/generate-final-story")
def generate_final_story_endpoint():
    """Turn the cached brainstorming result into a final story.

    FIX: previously an unguarded ``stored_data["brainstroming_response"]``
    lookup raised KeyError (an opaque 500) when /brainstrom had not been
    called yet; now the client gets an explicit 400 with guidance.
    """
    if 'brainstroming_response' not in stored_data:
        return JSONResponse(
            status_code=400,
            content={'error': 'No brainstorming result found; call /brainstrom first.'},
        )
    final_story = generate_final_story(stored_data['brainstroming_response'])
    # Cache for /generate-image.
    stored_data['final_story'] = final_story
    return {
        'response': final_story
    }
@app.post("/generate-image")
def generate_image_endpoint():
    """Generate an image from the cached final story.

    FIX: previously an unguarded ``stored_data['final_story']`` lookup
    raised KeyError (an opaque 500) when /generate-final-story had not been
    called yet; now the client gets an explicit 400 with guidance.
    """
    if 'final_story' not in stored_data:
        return JSONResponse(
            status_code=400,
            content={'error': 'No final story found; call /generate-final-story first.'},
        )
    image = generate_image(str(stored_data['final_story']))
    stored_data['generated_image'] = image
    return {
        'response': image
    }