"""
Level Bridge Chat -- Main app
Gradio Chat UI + FastAPI Bridge API endpoint, colocated.
Embedding (iframe):
<iframe src="https://your-space.hf.space?campaign_name=X&industry=EC&cvr=2.1" ...></iframe>
"""
from __future__ import annotations
import os
import base64
from pathlib import Path
import gradio as gr
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from bridge_models import BridgeRequest, DashboardContext, Metrics
from bridge_service import process_request
from session_store import store
# ---------------------------------------------------------------------------
# FastAPI app
# ---------------------------------------------------------------------------
fastapi_app = FastAPI(title="Level Bridge Chat API")


@fastapi_app.post("/api/chat/bridge")
async def bridge_endpoint(request: Request):
    """Validate the raw JSON body into a BridgeRequest and dispatch it.

    Returns a 422 JSON error payload when the body fails validation;
    otherwise the serialized result of ``process_request``.
    """
    payload = await request.json()
    try:
        bridge_req = BridgeRequest(**payload)
    except Exception as exc:  # NOTE(review): presumably a pydantic ValidationError — confirm
        return JSONResponse(
            status_code=422,
            content={
                "ok": False,
                "error_code": "VALIDATION_ERROR",
                "message": str(exc),
            },
        )
    return JSONResponse(content=process_request(bridge_req).model_dump())


@fastapi_app.get("/healthz")
async def healthz():
    """Liveness probe: reports OK plus the current session count."""
    return {"ok": True, "sessions": store.count()}
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _parse_float(value: str | None) -> float | None:
if value is None:
return None
try:
return float(str(value).replace("%", "").replace("円", "").strip())
except ValueError:
return None
def _image_to_base64(image_path: str | None) -> str | None:
if not image_path:
return None
try:
with open(image_path, "rb") as f:
data = f.read()
ext = Path(image_path).suffix.lower().lstrip(".")
mime = {"jpg": "jpeg", "jpeg": "jpeg", "png": "png", "webp": "webp", "gif": "gif"}.get(ext, "png")
return f"data:image/{mime};base64," + base64.b64encode(data).decode("utf-8")
except Exception:
return None
def _format_response(result) -> str:
"""Format BridgeResponse or BridgeErrorResponse as markdown chat message."""
if not result.ok:
msg = f"**エラー**: {result.message}"
if hasattr(result, "fallback") and result.fallback and result.fallback.get("next_level_preview"):
nlp = result.fallback["next_level_preview"]
if nlp.get("needed_info"):
items = ", ".join(i["label"] for i in nlp["needed_info"])
msg += f"\n\n次に **{items}** を追加すると提案が可能になります。"
return msg
b = result.best_now
n = result.next_level_preview
level_label = {"level1": "Lv.1(基本情報)", "level2": "Lv.2(数値あり)", "level3": "Lv.3(画像あり)"}.get(
result.inferred_level, result.inferred_level
)
confidence_label = {"low": "低", "mid": "中", "high": "高"}.get(b.confidence, b.confidence)
lines = [
f"### 現在の提案 [{level_label} / 確信度: {confidence_label}]",
"",
b.summary,
"",
"**推奨アクション**",
]
for i, action in enumerate(b.actions, 1):
lines.append(f"{i}. {action}")
if n.next_level:
lines += [
"",
"---",
f"### 次レベル({n.next_level})で可能になること",
"",
]
if n.needed_info:
items = "、".join(f"`{i.label}`(例: {i.example})" for i in n.needed_info)
lines.append(f"**必要な情報**: {items}")
lines.append("")
for item in n.what_will_be_possible:
lines.append(f"- {item}")
if n.expected_impact:
lines += ["", f"*{n.expected_impact}*"]
else:
lines += ["", "---", "*全情報が揃っています。現在の提案は最高精度です。*"]
if result.follow_up_question:
lines += ["", f"**{result.follow_up_question}**"]
return "\n".join(lines)
# ---------------------------------------------------------------------------
# Gradio UI
# ---------------------------------------------------------------------------
def build_gradio_ui() -> gr.Blocks:
    """Build the Gradio chat UI.

    The UI auto-initializes from iframe URL query parameters
    (``campaign_name``, ``industry``, ``cvr``, ``ctr``, ``cpa``) on load,
    then lets the user continue the conversation, optionally attaching a
    creative image.
    """
    with gr.Blocks(title="Level Bridge Chat") as demo:
        gr.Markdown("## Level Bridge Chat")
        gr.Markdown(
            "ダッシュボードの情報をもとに、広告改善提案を行います。"
            " 情報を追加するたびに提案精度が向上します。"
        )

        # State carried across turns.
        session_id_state = gr.State(None)
        context_state = gr.State({})

        # Level indicator
        level_display = gr.Markdown("**情報レベル**: 初期化中...")

        chatbot = gr.Chatbot(
            label="提案チャット",
            height=480,
            # FIX: history entries below are {"role", "content"} dicts, which
            # require the "messages" format; the legacy "tuples" default
            # would reject them in current Gradio versions.
            type="messages",
        )
        with gr.Row():
            msg_input = gr.Textbox(
                placeholder="メッセージを入力(例: CVR 2.1%、CTR 0.8% です)",
                label="メッセージ",
                scale=4,
                show_label=False,
            )
            send_btn = gr.Button("送信", variant="primary", scale=1)
        with gr.Row():
            image_upload = gr.Image(
                label="クリエイティブ画像(任意)",
                type="filepath",
                height=160,
            )

        # --- Shared helpers (deduplicated from send_message / on_load) ---
        def _level_label(level: str) -> str:
            """Human-readable label for an inferred information level."""
            return {
                "level1": "Lv.1(基本情報のみ)",
                "level2": "Lv.2(定量データあり)",
                "level3": "Lv.3(画像あり・最高精度)",
            }.get(level, level)

        def _build_dashboard_context(ctx: dict, image_b64: str | None = None):
            """Build a DashboardContext from pending values, or None if empty."""
            if not ctx and not image_b64:
                return None
            metrics_obj = None
            # NOTE(review): a metric of exactly 0 is treated as missing here
            # (truthiness check) — confirm that is acceptable.
            if ctx.get("cvr") or ctx.get("ctr") or ctx.get("cpa"):
                metrics_obj = Metrics(
                    cvr=ctx.get("cvr"),
                    ctr=ctx.get("ctr"),
                    cpa=ctx.get("cpa"),
                )
            return DashboardContext(
                campaign_name=ctx.get("campaign_name"),
                industry=ctx.get("industry"),
                metrics=metrics_obj,
                image_base64=image_b64,
            )

        # --- Core chat function ---
        def send_message(
            message: str,
            history: list,
            session_id: str | None,
            image_path: str | None,
            ctx: dict,
        ):
            """Handle one chat turn: call the bridge service, append to history."""
            # Ignore empty submissions (no text and no image).
            if not message.strip() and not image_path:
                return history, session_id, "", None, ctx, gr.update()

            image_b64 = _image_to_base64(image_path)
            req = BridgeRequest(
                session_id=session_id,
                message=message,
                dashboard_context=_build_dashboard_context(ctx, image_b64),
            )
            result = process_request(req)

            history = history + [
                {"role": "user", "content": message or "(画像を送信)"},
                {"role": "assistant", "content": _format_response(result)},
            ]
            level = _level_label(getattr(result, "inferred_level", "level1"))
            # Context is stored server-side in the session after the first
            # send, so the client-side copy is cleared.
            return (
                history,
                getattr(result, "session_id", session_id),
                "",    # clear message input
                None,  # clear image upload
                {},    # clear pending context
                gr.update(value=f"**情報レベル**: {level}"),
            )

        # --- Auto-initialize from URL query params ---
        def on_load(request: gr.Request):
            """Seed the conversation (Turn 1) from iframe URL query parameters."""
            params = dict(request.query_params)
            ctx: dict = {}
            if params.get("campaign_name"):
                ctx["campaign_name"] = params["campaign_name"]
            if params.get("industry"):
                ctx["industry"] = params["industry"]
            for key in ("cvr", "ctr", "cpa"):
                if params.get(key):
                    ctx[key] = _parse_float(params[key])
            if not ctx:
                return [], None, ctx, gr.update(value="**情報レベル**: 未初期化(URLパラメータなし)")

            # Auto-send Turn 1 with the dashboard context.
            req = BridgeRequest(
                session_id=None,
                message="",
                dashboard_context=_build_dashboard_context(ctx),
            )
            result = process_request(req)
            level = _level_label(getattr(result, "inferred_level", "level1"))
            return (
                [{"role": "assistant", "content": _format_response(result)}],
                getattr(result, "session_id", None),
                {},
                gr.update(value=f"**情報レベル**: {level}"),
            )

        # Wire events: click and Enter-submit share the same handler.
        chat_inputs = [msg_input, chatbot, session_id_state, image_upload, context_state]
        chat_outputs = [chatbot, session_id_state, msg_input, image_upload, context_state, level_display]
        demo.load(
            on_load,
            inputs=None,
            outputs=[chatbot, session_id_state, context_state, level_display],
        )
        send_btn.click(send_message, inputs=chat_inputs, outputs=chat_outputs)
        msg_input.submit(send_message, inputs=chat_inputs, outputs=chat_outputs)
    return demo
# ---------------------------------------------------------------------------
# Mount and launch
# ---------------------------------------------------------------------------
# Build the UI once at import time and mount it at the FastAPI root so the
# Space serves both the chat page ("/") and the Bridge API endpoints.
gradio_ui = build_gradio_ui()
app = gr.mount_gradio_app(fastapi_app, gradio_ui, path="/")

if __name__ == "__main__":
    import uvicorn

    # Hugging Face Spaces conventionally exposes port 7860.
    serve_port = int(os.environ.get("PORT", "7860"))
    uvicorn.run(app, host="0.0.0.0", port=serve_port, log_level="info")