Spaces:
Sleeping
Sleeping
File size: 3,438 Bytes
a9f99c3 be3a5c4 93a5bf9 be3a5c4 a9f99c3 93a5bf9 24b940c 93a5bf9 708437f 946d35b a9f99c3 946d35b be3a5c4 93a5bf9 be3a5c4 946d35b 3f2f8aa 85a68fb b55b8d4 93a5bf9 db141d0 24b940c 946d35b b55b8d4 85a68fb 508df21 93a5bf9 db141d0 92115be 85a68fb b55b8d4 da1776b a9f99c3 946d35b a9f99c3 93a5bf9 a9f99c3 946d35b d604e49 da1776b a9f99c3 946d35b 85a68fb a9f99c3 946d35b 708437f 946d35b 708437f 946d35b 708437f 06e8ef4 708437f 946d35b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 |
from fastapi import FastAPI , UploadFile , File , Form
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from brainstroming_agent.agent import brainstroming_graph
import pandas as pd
from typing import Optional , List
from context_analysis_agent.agent import IntroductionChatbot
from business_interaction_agent.agent import BusinessInteractionChatbot
from context_analysis_agent.utils.utils import save_to_db
import ast
from brainstroming_agent.utils.utils import encode_image_to_base64 , generate_final_story, generate_image
import json
# Store brainstorming results per thread_id
app = FastAPI()
# Conversational agents/graphs, instantiated once at import time.
# NOTE(review): constructors may do I/O (model/LLM setup) — confirm startup cost.
context_analysis_graph = IntroductionChatbot()
business_interaction_graph = BusinessInteractionChatbot()
brainstrom_graph = brainstroming_graph()
# Process-wide in-memory state shared by every endpoint. Not persisted,
# not per-user, and not protected by a lock — effectively single-session.
stored_data={}
# Pre-seeded default so downstream endpoints work even before
# /context-analysis has populated real business details.
stored_data['business_details']={"business_type": "restaurant", "platform": "instagram", "target_audience": "youths", "business_goals": "to go global", "offerings": "nepali foods", "Challenges_faced": "finding new customers, attracting large customers"}
class UserMessage(BaseModel):
    """Request body for /context-analysis: a single user chat message."""
    message: str
@app.post("/context-analysis")
def context_analysis(msg: UserMessage):
    """Run one turn of the context-analysis conversation.

    Returns the assistant's response. Once the graph reports the intake is
    complete, also extracts the business details, normalizes them to a dict,
    persists them, caches them in ``stored_data``, and returns them.
    """
    response = context_analysis_graph.chat(msg.message)
    if context_analysis_graph.is_complete(response):
        details = context_analysis_graph.extract_details()
        # extract_details may yield a stringified dict, a pydantic model,
        # or a plain dict depending on LLM output — normalize to a dict.
        # BUGFIX: the str case must be checked FIRST; the original called
        # .model_dump() on anything that wasn't a dict, so a string result
        # raised AttributeError before it could be parsed.
        if isinstance(details, str):
            # literal_eval (not eval) safely parses a repr'd dict string.
            details = ast.literal_eval(details)
        elif not isinstance(details, dict):
            details = details.model_dump()
        print('Business_details:', details)
        save_to_db(details)
        stored_data['business_details'] = details
        return {"response": response, "business_details": details, "complete": True}
    return {"response": response, "complete": False}
@app.post("/business-interaction")
def business_interaction(interaction: str):
    """Run one business-interaction turn and refresh the stored details.

    NOTE(review): a bare ``str`` parameter is treated by FastAPI as a query
    parameter, not a JSON body — confirm that matches what clients send.
    """
    # Default to an empty dict so a fresh process (no prior /context-analysis
    # result and no seed) cannot raise KeyError — consistent with the
    # defaulting the /brainstrom endpoint already performs.
    current_details = stored_data.get('business_details', {})
    response, business_details = business_interaction_graph.chat(interaction, current_details)
    stored_data['business_details'] = business_details
    return {'response': response}
@app.post("/brainstrom")
def brainstroming_endpoint(
    query: List[str],  # sent as JSON body
    preferred_topics: Optional[list] = None,
    images: Optional[List[UploadFile]] = None,
    thread_id: Optional[str] = "default-session",
):
    """Invoke the brainstorming graph for ``query`` and cache the result.

    Uploaded images are base64-encoded before being handed to the graph;
    the graph run is keyed by ``thread_id`` via LangGraph's configurable.
    """
    # Avoid mutable default arguments ([]): default to None, then
    # normalize so the graph still receives empty lists when omitted.
    preferred_topics = preferred_topics or []
    images = images or []
    # Convert uploaded images to base64 for the graph payload.
    image_base64_list = [encode_image_to_base64(img) for img in images]
    result = brainstrom_graph.invoke(
        {
            'topic': query,
            'images': image_base64_list,
            'latest_preferred_topics': preferred_topics,
            # dict.get replaces the original inline-lambda lookup.
            'business_details': stored_data.get('business_details', {}),
        },
        config={"configurable": {"thread_id": thread_id}},
    )
    stored_data['brainstroming_response'] = result
    return {
        'response': result,
    }
@app.post("/generate-final-story")
def generate_final_story_endpoint():
    """Generate the final story from the cached brainstorming result.

    Requires /brainstrom to have been called earlier in this process;
    otherwise responds 400 instead of crashing with a KeyError.
    """
    if 'brainstroming_response' not in stored_data:
        # Guard: the original raised an unhandled KeyError (HTTP 500) here.
        return JSONResponse(
            status_code=400,
            content={'error': 'No brainstorming result found. Call /brainstrom first.'},
        )
    final_story = generate_final_story(stored_data['brainstroming_response'])
    stored_data['final_story'] = final_story
    return {
        'response': final_story
    }
@app.post("/generate-image")
def generate_image_endpoint():
    """Generate an image from the cached final story.

    Requires /generate-final-story to have been called earlier in this
    process; otherwise responds 400 instead of crashing with a KeyError.
    """
    if 'final_story' not in stored_data:
        # Guard: the original raised an unhandled KeyError (HTTP 500) here.
        return JSONResponse(
            status_code=400,
            content={'error': 'No final story found. Call /generate-final-story first.'},
        )
    image = generate_image(str(stored_data['final_story']))
    stored_data['generated_image'] = image
    return {
        'response': image
    }
|