# NOTE(review): removed non-Python residue scraped from the hosting site's file
# viewer (status labels, file size, commit hashes, line-number gutter) — it was
# not part of the program and made the module unimportable.
from fastapi import FastAPI , UploadFile , File , Form
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from my_agent.agent import build_graph
import pandas as pd
from typing import Optional , List
from my_agent.utils.initial_interaction import IntroductionChatbot
from my_agent.utils.business_interaction import BusinessInteractionChatbot
# from my_agent.utils.check import BusinessInteractionChatbot
from my_agent.utils.utils import encode_image_to_base64 , generate_final_story, generate_image
import json
# Module-level singletons shared by all endpoints below.
app = FastAPI()
# Chatbot for the initial "tell me about your business" conversation.
introduction_chatbot = IntroductionChatbot()
# Second, stateless business-interaction chatbot (used by /business-interaction2).
interaction_chatbot2 = BusinessInteractionChatbot()
# Compiled LangGraph graph invoked by the /brainstrom endpoint.
graph = build_graph()
# In-memory cross-request store: holds 'business_details', 'brainstroming_response',
# 'final_story', 'generated_image'. NOTE(review): shared by ALL clients — a single
# global session, not per-user/per-thread state; confirm this is intentional.
stored_data={}
class UserMessage(BaseModel):
    """Request body for /business-interaction: a single user chat message."""
    message: str
@app.post("/business-interaction")
def business_introduction_chat(msg: UserMessage):
    """One turn of the introduction conversation.

    Forwards the user's message to the introduction chatbot. Once the bot
    signals the conversation is complete, the extracted business details are
    cached in the module-level store and returned alongside the reply.
    """
    reply = introduction_chatbot.chat(msg.message)

    # Guard clause: conversation still in progress — nothing to extract yet.
    if not introduction_chatbot.is_complete(reply):
        return {"response": reply, "complete": False}

    extracted = introduction_chatbot.extract_details()
    # Cache for later use by /brainstrom.
    stored_data['business_details'] = extracted
    return {"response": reply, "business_details": extracted, "complete": True}
@app.post("/business-interaction2")
def business_interaction_chat(interaction: str):
    """Single stateless turn against the second business-interaction chatbot.

    `interaction` is a plain string parameter (FastAPI reads it from the
    query string for a non-model POST argument).
    """
    bot_reply = interaction_chatbot2.chat(interaction)
    return {'response': bot_reply}
@app.post("/brainstrom")
def brainstroming_endpoint(
    query: List[str],  # sent as JSON body
    preferred_topics: Optional[list] = None,
    images: Optional[List[UploadFile]] = None,  # optional multipart uploads
    thread_id: Optional[str] = "default-session",
):
    """Run the LangGraph brainstorming graph for the given topics.

    Encodes any uploaded images to base64, invokes the graph with the topics,
    preferred topics, and previously cached business details, then caches the
    result under 'brainstroming_response' for /generate-final-story.
    """
    # Mutable defaults ([]) are shared across calls in Python; use None and
    # normalize here instead.
    preferred_topics = preferred_topics or []
    images = images or []

    # Convert uploaded images to base64 for the graph payload.
    image_base64_list = [encode_image_to_base64(img) for img in images]

    # Invoke LangGraph. Business details are only present after the
    # /business-interaction conversation has completed, so fall back to {}.
    result = graph.invoke(
        {
            'topic': query,
            'images': image_base64_list,
            'latest_preferred_topics': preferred_topics,
            'business_details': stored_data.get('business_details', {}),
        },
        config={"configurable": {"thread_id": thread_id}},
    )

    # Cache for the follow-up /generate-final-story call.
    stored_data['brainstroming_response'] = result
    return {
        'response': result,
    }
@app.post("/generate-final-story")
def generate_final_story_endpoint():
    """Generate the final story from the cached brainstorming result.

    Requires a prior successful /brainstrom call; returns HTTP 400 with an
    explanatory message instead of an unhandled KeyError (HTTP 500) when the
    brainstorming result is missing. The story is cached under 'final_story'
    for the /generate-image endpoint.
    """
    brainstorm = stored_data.get("brainstroming_response")
    if brainstorm is None:
        # Previously stored_data["brainstroming_response"] raised a KeyError here.
        return JSONResponse(
            status_code=400,
            content={"error": "No brainstorming result found. Call /brainstrom first."},
        )
    final_story = generate_final_story(brainstorm)
    stored_data['final_story'] = final_story
    return {
        'response': final_story
    }
@app.post("/generate-image")
def generate_image_endpoint():
    """Generate an image from the cached final story.

    Requires a prior successful /generate-final-story call; returns HTTP 400
    with an explanatory message instead of an unhandled KeyError (HTTP 500)
    when the story is missing. The image is cached under 'generated_image'.
    """
    final_story = stored_data.get('final_story')
    if final_story is None:
        # Previously stored_data['final_story'] raised a KeyError here.
        return JSONResponse(
            status_code=400,
            content={"error": "No final story found. Call /generate-final-story first."},
        )
    image = generate_image(str(final_story))
    stored_data['generated_image'] = image
    return {
        'response': image
    }
# NOTE(review): removed stray '|' residue from the file-viewer scrape.