# (The "Spaces: Running" banner that preceded this file was Hugging Face
#  Spaces UI residue from extraction, not part of the source.)
"""BF-Realtime V2 — FastAPI entry point.

Routing and startup logic only. All business logic lives in separate modules:
- product_index: Trek catalog XML parsing + hash index
- product_matcher: fuzzy main-product + color-variant matching
- stock_service: BizimHesap + Trek PHP stock cache
- tools: get_warehouse_stock implementation
- realtime_relay: OpenAI Realtime WS proxy
"""
from __future__ import annotations
import asyncio
import logging
from fastapi import FastAPI, WebSocket
from fastapi.responses import FileResponse, JSONResponse, Response
from fastapi.staticfiles import StaticFiles
from config import (
    OPENAI_API_KEY,
    REALTIME_MODEL,
    REFRESH_INTERVAL,
)
from browser_session import get_browser_session
from product_index import background_refresh_loop, get_index
from product_matcher import find_main_product_in_text
from realtime_relay import realtime_relay
from stock_service import (
    cached_bh,
    cached_bh_inventory,
    get_cached_warehouse_xml,
)

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI(title="BF-Realtime V2")
app.mount("/static", StaticFiles(directory="static"), name="static")
async def _startup():
    """Launch background refresh loops and pre-warm the headless browser.

    Spawned tasks: product-index refresh, warehouse-XML refresh (both on a
    REFRESH_INTERVAL cadence) and a one-shot browser pre-warm.

    NOTE(review): this looks like a FastAPI startup hook, but no
    ``@app.on_event("startup")`` decorator is visible in this chunk —
    confirm it is registered somewhere.
    """
    async def warehouse_loop():
        # Refresh the warehouse XML cache forever; errors are logged and
        # retried on the next tick rather than killing the loop.
        while True:
            try:
                await asyncio.to_thread(get_cached_warehouse_xml)
            except Exception:
                logger.exception("warehouse refresh hatasi")
            await asyncio.sleep(REFRESH_INTERVAL)

    # Start headless Chromium in the background — no delay on first navigate.
    async def _prewarm_browser():
        try:
            await get_browser_session().ensure_started()
            logger.info("Browser pre-warm tamamlandi")
        except Exception:
            logger.exception("Browser pre-warm hatasi")

    # Fix: keep strong references to the tasks. The event loop holds only
    # weak references, so fire-and-forget create_task() results may be
    # garbage-collected mid-flight (see asyncio.create_task docs).
    app.state.background_tasks = [
        asyncio.create_task(background_refresh_loop(REFRESH_INTERVAL)),
        asyncio.create_task(warehouse_loop()),
        asyncio.create_task(_prewarm_browser()),
    ]
# ---------- Frontend ----------
async def root():
    """Serve the single-page frontend."""
    index_page = FileResponse("static/index.html")
    return index_page
async def health():
    """Health probe: status, model name, API-key presence and index size."""
    index = get_index()
    payload = {
        "status": "ok",
        "model": REALTIME_MODEL,
        "has_api_key": bool(OPENAI_API_KEY),
        "index_main_count": index.main_count,
    }
    return payload
# ---------- Realtime WebSocket ----------
async def ws_endpoint(client_ws: WebSocket):
    """Hand the client socket straight to the OpenAI Realtime relay."""
    await realtime_relay(client_ws)
# ---------- Browser stream WebSocket (right-hand monitor live page) ----------
async def browser_ws(ws: WebSocket):
    """Live browser-stream WebSocket.

    Runs two concurrent loops: ``session.stream_to`` pushes frames to the
    client while ``receive_loop`` forwards client click/scroll input to the
    headless browser. Either side finishing tears the other down.
    """
    from fastapi import WebSocketDisconnect

    await ws.accept()
    session = get_browser_session()

    async def receive_loop():
        # Forward client input events (click/scroll) to the browser session.
        try:
            while True:
                msg = await ws.receive_json()
                t = msg.get("type")
                if t == "click":
                    await session.click(float(msg.get("x", 0)), float(msg.get("y", 0)))
                elif t == "scroll":
                    await session.scroll(int(msg.get("dy", 0)))
        except WebSocketDisconnect:
            pass
        except Exception:
            logger.exception("browser_ws receive hatasi")

    stream_task = asyncio.create_task(session.stream_to(ws))
    input_task = asyncio.create_task(receive_loop())
    try:
        # Fix: plain gather() neither cancels the sibling coroutine when one
        # side fails nor returns when one side finishes cleanly, so the other
        # loop could keep running against a dead socket. Wait for whichever
        # side ends first, then cancel the other.
        done, _pending = await asyncio.wait(
            {stream_task, input_task}, return_when=asyncio.FIRST_COMPLETED
        )
        for task in done:
            if not task.cancelled() and task.exception() is not None:
                raise task.exception()
    except WebSocketDisconnect:
        pass
    except Exception:
        logger.exception("browser_ws hatasi")
    finally:
        for task in (stream_task, input_task):
            task.cancel()
        # Reap both tasks so cancellation errors are not logged as "never retrieved".
        await asyncio.gather(stream_task, input_task, return_exceptions=True)
        try:
            await ws.close()
        except Exception:
            pass
# Manual test/debug — load the requested URL into the headless browser
async def browser_navigate(url: str):
    """Navigate the shared headless browser to *url* and acknowledge."""
    session = get_browser_session()
    await session.navigate(url)
    return {"ok": True, "url": url}
# Debug — current page's screenshot + title + url
async def browser_debug(url: str | None = None):
    """Return the current page's url and title plus a base64 JPEG screenshot."""
    import base64

    session = get_browser_session()
    await session.ensure_started()
    if url:
        await session.navigate(url)

    page_title = ""
    page_url = session.current_url
    try:
        # NOTE(review): reaches into the private _page attribute — presumably
        # the underlying browser Page object; confirm no public accessor exists.
        if session._page:
            page_title = await session._page.title()
            page_url = session._page.url
    except Exception:
        pass

    jpeg = await session.screenshot_jpeg()
    encoded = base64.b64encode(jpeg).decode("ascii") if jpeg else None
    return {
        "url": page_url,
        "title": page_title,
        "screenshot_b64": encoded,
    }
async def browser_screenshot(url: str | None = None):
    """Return the current page as a raw JPEG; 503 when no frame is available."""
    session = get_browser_session()
    await session.ensure_started()
    if url:
        await session.navigate(url)
    jpeg = await session.screenshot_jpeg()
    if jpeg:
        return Response(content=jpeg, media_type="image/jpeg")
    return Response(status_code=503)
# ---------- Public proxy: warehouse XML (consumed by other services) ----------
async def warehouse_xml():
    """Serve the cached Trek warehouse XML.

    Falls back to an empty ``<Products/>`` document (no-cache, so clients
    retry soon) when the cache is cold or the upstream fetch failed — XML
    consumers never receive a non-XML error body.
    """
    xml = await asyncio.to_thread(get_cached_warehouse_xml)
    if not xml:
        # Fix: the fallback previously omitted the CORS header that the
        # success path sends, so browser clients failed exactly when the
        # data was missing.
        return Response(
            content='<?xml version="1.0" encoding="UTF-8"?>\n<Products></Products>',
            media_type="application/xml",
            headers={
                "Access-Control-Allow-Origin": "*",
                "Cache-Control": "no-cache",
            },
        )
    return Response(
        content=xml,
        media_type="application/xml",
        headers={
            "Access-Control-Allow-Origin": "*",
            "Cache-Control": "public, max-age=300",
        },
    )
# ---------- Public proxy: BizimHesap raw (tavsiye/sold/other clients) ----------
def _bh_proxy_response(data, status):
    """Wrap a cached BizimHesap payload in a JSONResponse.

    Fix: this 502-or-JSON construction was copy-pasted in three endpoints;
    centralizing it keeps headers and error bodies consistent.
    """
    if data is None:
        return JSONResponse(
            {"resultCode": 0, "errorText": "BizimHesap fetch failed", "data": None},
            status_code=502,
        )
    return JSONResponse(
        data,
        headers={
            "Access-Control-Allow-Origin": "*",
            "X-Cache": status,  # HIT/MISS style cache status from cached_bh
            "Cache-Control": "public, max-age=300",
        },
    )

async def bh_products():
    """Proxy BizimHesap /products through the shared TTL cache."""
    from stock_service import bh_get
    from config import BIZIMHESAP_BASE, CACHE_TTL_BH_PRODUCTS

    data, status = await asyncio.to_thread(
        cached_bh,
        "products",
        lambda: bh_get(f"{BIZIMHESAP_BASE}/products"),
        CACHE_TTL_BH_PRODUCTS,
    )
    return _bh_proxy_response(data, status)

async def bh_warehouses():
    """Proxy BizimHesap /warehouses through the shared TTL cache."""
    from stock_service import bh_get
    from config import BIZIMHESAP_BASE, CACHE_TTL_BH_WAREHOUSES

    data, status = await asyncio.to_thread(
        cached_bh,
        "warehouses",
        lambda: bh_get(f"{BIZIMHESAP_BASE}/warehouses"),
        CACHE_TTL_BH_WAREHOUSES,
    )
    return _bh_proxy_response(data, status)

async def bh_inventory(wid: str):
    """Proxy per-warehouse inventory (*wid* = BizimHesap warehouse id)."""
    data, status = await asyncio.to_thread(cached_bh_inventory, wid)
    return _bh_proxy_response(data, status)
# ---------- Debug ----------
async def debug_find(q: str):
    """Debug: run the fuzzy main-product matcher directly on raw text."""
    product = find_main_product_in_text(q)
    matched_name = product.get("name") if product else None
    return {"query": q, "matched": matched_name, "result": product}
async def debug_search(q: str):
    """Debug: run the full stock-check tool, which normalizes the query
    the same way the voice path does (e.g. Jaddebostan -> Caddebostan)."""
    from tools import handle_tool_call_sync

    tool_result = await asyncio.to_thread(
        handle_tool_call_sync, "check_warehouse_stock", {"user_message": q}
    )
    return {"query": q, "result": tool_result}
async def debug_show(q: str):
    """Debug: resolve *q* to a product page via tools.show_product_local."""
    from tools import show_product_local

    shown = await asyncio.to_thread(show_product_local, q)
    return {"query": q, "result": shown}