Song committed on
Commit ·
238cf71
1
Parent(s): caa1332
hi
Browse files- .gitignore +12 -0
- Dockerfile +44 -0
- README.md +434 -8
- app.py +649 -0
- cache.py +249 -0
- chat_service.py +445 -0
- config.py +108 -0
- crud.py +453 -0
- data/慢性病飲食原則.md +57 -0
- database.py +91 -0
- dependencies.py +335 -0
- exceptions.py +237 -0
- menu_data.py +413 -0
- models.py +254 -0
- rag.py +469 -0
- requirements.txt +35 -0
- schemas.py +275 -0
- setup_rag_db.py +81 -0
- stripe_service.py +493 -0
.gitignore
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Python
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
venv/
|
| 6 |
+
.env
|
| 7 |
+
|
| 8 |
+
# Data and large files (using Git LFS for PDFs)
|
| 9 |
+
data/*.pdf
|
| 10 |
+
|
| 11 |
+
# OS files
|
| 12 |
+
.DS_Store
|
Dockerfile
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Use Python 3.11 slim image for production
FROM python:3.11-slim

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONPATH=/app \
    PIP_NO_CACHE_DIR=1 \
    PIP_DISABLE_PIP_VERSION_CHECK=1

# Set work directory
WORKDIR /app

# Install system dependencies
# --no-install-recommends keeps the image small; curl is needed by HEALTHCHECK
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    g++ \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first for better caching
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Create non-root user for security (before copying, so ownership is set
# at copy time instead of via a separate `chown -R` layer that duplicates
# the entire application tree in the image)
RUN groupadd -r appuser && useradd -r -g appuser appuser

# Copy application code, already owned by the non-root user
COPY --chown=appuser:appuser . .

USER appuser

# Expose port (Hugging Face Spaces default is 7860)
ENV PORT=7860
EXPOSE 7860

# Health check
HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:7860/health || exit 1

# Run the application
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860", "--workers", "1"]
|
README.md
CHANGED
|
@@ -1,10 +1,436 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
---
|
| 9 |
|
| 10 |
-
|
|
|
|
| 1 |
+
# 銀髮餐桌助手 Backend API
|
| 2 |
+
|
| 3 |
+
專為台灣銀髮族設計的AI營養飲食顧問服務後端,提供REST API接口和Gradio聊天界面。
|
| 4 |
+
|
| 5 |
+
## 項目概述
|
| 6 |
+
|
| 7 |
+
銀髮餐桌助手是一個結合AI技術的智能營養顧問系統,專為台灣銀髮族(65歲以上)提供個性化飲食建議和營養諮詢。系統採用FastAPI框架構建,提供完整的API接口和用戶友好的Gradio聊天界面。
|
| 8 |
+
|
| 9 |
+
## 核心功能
|
| 10 |
+
|
| 11 |
+
### 🔐 身份驗證與授權
|
| 12 |
+
- 基於Supabase Auth的JWT身份驗證
|
| 13 |
+
- 支持多種用戶角色(user、family、admin)
|
| 14 |
+
- 角色基礎的訪問控制(RBAC)
|
| 15 |
+
|
| 16 |
+
### 👤 用戶資料管理
|
| 17 |
+
- 用戶檔案創建與更新
|
| 18 |
+
- 健康狀況和飲食偏好管理
|
| 19 |
+
- 個人化營養建議基礎
|
| 20 |
+
|
| 21 |
+
### 💬 AI聊天諮詢
|
| 22 |
+
- 智能營養飲食對話
|
| 23 |
+
- 基於RAG的知識檢索增強生成
|
| 24 |
+
- 支持個人化回應(基於用戶檔案)
|
| 25 |
+
- Gradio無登入聊天界面
|
| 26 |
+
|
| 27 |
+
### 🍽️ 菜單管理
|
| 28 |
+
- 台灣在地銀髮友善餐點
|
| 29 |
+
- 營養成分分析
|
| 30 |
+
- 飲食標籤分類
|
| 31 |
+
- 季節性和健康條件適配
|
| 32 |
+
|
| 33 |
+
### 🛒 訂單管理
|
| 34 |
+
- 餐點訂購功能
|
| 35 |
+
- Stripe支付集成
|
| 36 |
+
- 訂單狀態追蹤
|
| 37 |
+
- 營養統計分析
|
| 38 |
+
|
| 39 |
+
### 💰 捐款系統
|
| 40 |
+
- 匿名和實名捐款支持
|
| 41 |
+
- Stripe支付處理
|
| 42 |
+
- 捐款記錄管理
|
| 43 |
+
|
| 44 |
+
### 📊 營養儀表板
|
| 45 |
+
- 個人營養攝取統計
|
| 46 |
+
- 飲食習慣分析
|
| 47 |
+
- 家庭成員健康監控
|
| 48 |
+
|
| 49 |
+
## 技術架構
|
| 50 |
+
|
| 51 |
+
### 核心技術棧
|
| 52 |
+
- **FastAPI** - 高性能異步API框架
|
| 53 |
+
- **SQLModel** - Python ORM與數據建模
|
| 54 |
+
- **PostgreSQL** - 主數據庫(通過Supabase)
|
| 55 |
+
- **pgvector** - 向量搜索支持
|
| 56 |
+
- **Supabase** - 認證、數據庫和向量存儲
|
| 57 |
+
- **OpenAI** - AI模型和向量嵌入
|
| 58 |
+
- **LangChain** - RAG實現框架
|
| 59 |
+
- **Stripe** - 支付處理
|
| 60 |
+
- **Gradio** - 聊天界面
|
| 61 |
+
|
| 62 |
+
### 系統架構
|
| 63 |
+
```
|
| 64 |
+
┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐
|
| 65 |
+
│ Frontend │ │ Backend API │ │ External │
|
| 66 |
+
│ (React/Vue) │◄──►│ (FastAPI) │◄──►│ Services │
|
| 67 |
+
└─────────────────┘ └─────────────────┘ └─────────────────┘
|
| 68 |
+
│
|
| 69 |
+
▼
|
| 70 |
+
┌─────────────────┐
|
| 71 |
+
│ Database │
|
| 72 |
+
│ (Supabase │
|
| 73 |
+
│ PostgreSQL) │
|
| 74 |
+
└─────────────────┘
|
| 75 |
+
```
|
| 76 |
+
|
| 77 |
+
## 快速開始
|
| 78 |
+
|
| 79 |
+
### 前置要求
|
| 80 |
+
|
| 81 |
+
- Python 3.8+
|
| 82 |
+
- PostgreSQL 13+ (通過Supabase)
|
| 83 |
+
- Supabase項目
|
| 84 |
+
- OpenAI API Key
|
| 85 |
+
- Stripe賬戶
|
| 86 |
+
|
| 87 |
+
### 環境變量配置
|
| 88 |
+
|
| 89 |
+
複製 `.env.example` 到 `.env` 並填入以下配置:
|
| 90 |
+
|
| 91 |
+
```bash
|
| 92 |
+
# Supabase Configuration
|
| 93 |
+
SUPABASE_URL=your_supabase_project_url
|
| 94 |
+
SUPABASE_SERVICE_ROLE_KEY=your_supabase_service_role_key
|
| 95 |
+
SUPABASE_ANON_KEY=your_supabase_anon_key
|
| 96 |
+
|
| 97 |
+
# Database URL for pgvector (Supabase PostgreSQL)
|
| 98 |
+
DATABASE_URL=postgresql+asyncpg://postgres:[YOUR_PASSWORD]@db.[YOUR_PROJECT_ID].supabase.co:5432/postgres
|
| 99 |
+
|
| 100 |
+
# OpenAI Configuration
|
| 101 |
+
OPENAI_API_KEY=your_openai_api_key
|
| 102 |
+
|
| 103 |
+
# Stripe Configuration
|
| 104 |
+
STRIPE_SECRET_KEY=sk_test_your_stripe_secret_key
|
| 105 |
+
STRIPE_PUBLISHABLE_KEY=pk_test_your_stripe_publishable_key
|
| 106 |
+
STRIPE_WEBHOOK_SECRET=whsec_your_stripe_webhook_secret
|
| 107 |
+
|
| 108 |
+
# JWT Secret (if using custom JWT handling)
|
| 109 |
+
JWT_SECRET=your_jwt_secret_key
|
| 110 |
+
|
| 111 |
+
# Environment
|
| 112 |
+
ENVIRONMENT=development
|
| 113 |
+
|
| 114 |
+
# CORS Origins (comma-separated)
|
| 115 |
+
CORS_ORIGINS=http://localhost:3000,http://localhost:5173
|
| 116 |
+
|
| 117 |
+
# Frontend URL for redirects
|
| 118 |
+
FRONTEND_URL=http://localhost:5173
|
| 119 |
+
|
| 120 |
+
# Server Configuration
|
| 121 |
+
HOST=0.0.0.0
|
| 122 |
+
PORT=8000
|
| 123 |
+
```
|
| 124 |
+
|
| 125 |
+
### 安裝依賴
|
| 126 |
+
|
| 127 |
+
```bash
|
| 128 |
+
# 克隆項目
|
| 129 |
+
git clone <repository-url>
|
| 130 |
+
cd silver-table-assistant/backend
|
| 131 |
+
|
| 132 |
+
# 創建虛擬環境
|
| 133 |
+
python -m venv venv
|
| 134 |
+
source venv/bin/activate # Linux/Mac
|
| 135 |
+
# 或 venv\Scripts\activate # Windows
|
| 136 |
+
|
| 137 |
+
# 安裝依賴
|
| 138 |
+
pip install -r requirements.txt
|
| 139 |
+
```
|
| 140 |
+
|
| 141 |
+
### 啟動服務
|
| 142 |
+
|
| 143 |
+
#### 開發模式
|
| 144 |
+
```bash
|
| 145 |
+
# 啟動FastAPI服務(自動重載)
|
| 146 |
+
python app.py
|
| 147 |
+
|
| 148 |
+
# 或使用uvicorn
|
| 149 |
+
uvicorn app:app --reload --host 0.0.0.0 --port 8000
|
| 150 |
+
```
|
| 151 |
+
|
| 152 |
+
#### 生產模式
|
| 153 |
+
```bash
|
| 154 |
+
# 使用uvicorn生產配置
|
| 155 |
+
uvicorn app:app --host 0.0.0.0 --port 8000 --workers 4
|
| 156 |
+
```
|
| 157 |
+
|
| 158 |
+
### 訪問應用
|
| 159 |
+
|
| 160 |
+
- **API文檔**: http://localhost:8000/api/docs
|
| 161 |
+
- **ReDoc文檔**: http://localhost:8000/api/redoc
|
| 162 |
+
- **Gradio聊天界面**: http://localhost:8000/
|
| 163 |
+
- **健康檢查**: http://localhost:8000/health
|
| 164 |
+
|
| 165 |
+
## API文檔
|
| 166 |
+
|
| 167 |
+
### 身份驗證
|
| 168 |
+
|
| 169 |
+
所有需要認證的端點都需要在請求頭中包含JWT token:
|
| 170 |
+
|
| 171 |
+
```http
|
| 172 |
+
Authorization: Bearer <your-jwt-token>
|
| 173 |
+
```
|
| 174 |
+
|
| 175 |
+
### 主要端點
|
| 176 |
+
|
| 177 |
+
#### 用戶資料
|
| 178 |
+
- `GET /api/profiles` - 獲取用戶資料列表
|
| 179 |
+
- `POST /api/profiles` - 創建或更新用戶資料
|
| 180 |
+
|
| 181 |
+
#### 聊天諮詢
|
| 182 |
+
- `POST /api/chat` - AI聊天諮詢(需要認證)
|
| 183 |
+
- `GET /` - Gradio聊天界面(無需認證)
|
| 184 |
+
|
| 185 |
+
#### 菜單管理
|
| 186 |
+
- `GET /api/menu` - 獲取完整菜單
|
| 187 |
+
|
| 188 |
+
#### 訂單管理
|
| 189 |
+
- `POST /api/orders` - 創建新訂單(需要認證)
|
| 190 |
+
|
| 191 |
+
#### 捐款系統
|
| 192 |
+
- `POST /api/donations` - 創建捐款(支持匿名)
|
| 193 |
+
|
| 194 |
+
#### 支付處理
|
| 195 |
+
- `POST /api/webhook` - Stripe支付回調
|
| 196 |
+
|
| 197 |
+
#### 儀表板
|
| 198 |
+
- `GET /api/dashboard/{profile_id}` - 獲取營養統計(需要family角色)
|
| 199 |
+
|
| 200 |
+
### 請求/響應範例
|
| 201 |
+
|
| 202 |
+
#### 創建用戶資料
|
| 203 |
+
```bash
|
| 204 |
+
curl -X POST "http://localhost:8000/api/profiles" \
|
| 205 |
+
-H "Authorization: Bearer <token>" \
|
| 206 |
+
-H "Content-Type: application/json" \
|
| 207 |
+
-d '{
|
| 208 |
+
"display_name": "張爺爺",
|
| 209 |
+
"age": 75,
|
| 210 |
+
"health_condition": "高血壓",
|
| 211 |
+
"dietary_restrictions": "低鈉飲食"
|
| 212 |
+
}'
|
| 213 |
+
```
|
| 214 |
+
|
| 215 |
+
#### AI聊天諮詢
|
| 216 |
+
```bash
|
| 217 |
+
curl -X POST "http://localhost:8000/api/chat" \
|
| 218 |
+
-H "Authorization: Bearer <token>" \
|
| 219 |
+
-H "Content-Type: application/json" \
|
| 220 |
+
-d '{
|
| 221 |
+
"message": "請問銀髮族應該如何補充蛋白質?",
|
| 222 |
+
"profile_id": "uuid-string"
|
| 223 |
+
}'
|
| 224 |
+
```
|
| 225 |
+
|
| 226 |
+
## 數據庫架構
|
| 227 |
+
|
| 228 |
+
### 主要數據表
|
| 229 |
+
|
| 230 |
+
#### profiles
|
| 231 |
+
用戶資料表,存儲個人信息和健康狀況。
|
| 232 |
+
|
| 233 |
+
#### orders
|
| 234 |
+
訂單表,記錄食物訂購和支付信息。
|
| 235 |
+
|
| 236 |
+
#### donations
|
| 237 |
+
捐款表,管理捐款信息和狀態。
|
| 238 |
+
|
| 239 |
+
#### menu_items
|
| 240 |
+
菜單項目表,存儲餐點信息和營養數據。
|
| 241 |
+
|
| 242 |
+
#### chat_conversations
|
| 243 |
+
聊天記錄表,存儲用戶與AI的對話歷史。
|
| 244 |
+
|
| 245 |
+
### 數據庫初始化
|
| 246 |
+
|
| 247 |
+
系統會在啟動時自動創建所需的數據表結構。確保Supabase項目已啟用pgvector擴展:
|
| 248 |
+
|
| 249 |
+
```sql
|
| 250 |
+
-- 在Supabase SQL Editor中執行
|
| 251 |
+
CREATE EXTENSION IF NOT EXISTS vector;
|
| 252 |
+
```
|
| 253 |
+
|
| 254 |
+
## 部署指南
|
| 255 |
+
|
| 256 |
+
### Hugging Face Spaces 部署
|
| 257 |
+
|
| 258 |
+
1. **準備部署文件**
|
| 259 |
+
```bash
|
| 260 |
+
# 創建部署所需文件
|
| 261 |
+
touch README.md
|
| 262 |
+
echo "fastapi>=0.104.0" > requirements.txt
|
| 263 |
+
echo "uvicorn[standard]>=0.24.0" >> requirements.txt
|
| 264 |
+
# 添加其他必要依賴
|
| 265 |
+
```
|
| 266 |
+
|
| 267 |
+
2. **配置環境變量**
|
| 268 |
+
在Hugging Face Spaces設置中添加所有必要的環境變量。
|
| 269 |
+
|
| 270 |
+
3. **創建入口文件**
|
| 271 |
+
確保項目根目錄有適當的啟動配置。
|
| 272 |
+
|
| 273 |
+
### Docker 部署
|
| 274 |
+
|
| 275 |
+
#### Dockerfile
|
| 276 |
+
```dockerfile
|
| 277 |
+
FROM python:3.9-slim
|
| 278 |
+
|
| 279 |
+
WORKDIR /app
|
| 280 |
+
|
| 281 |
+
COPY requirements.txt .
|
| 282 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
| 283 |
+
|
| 284 |
+
COPY . .
|
| 285 |
+
|
| 286 |
+
EXPOSE 8000
|
| 287 |
+
|
| 288 |
+
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000"]
|
| 289 |
+
```
|
| 290 |
+
|
| 291 |
+
#### docker-compose.yml
|
| 292 |
+
```yaml
|
| 293 |
+
version: '3.8'
|
| 294 |
+
services:
|
| 295 |
+
backend:
|
| 296 |
+
build: .
|
| 297 |
+
ports:
|
| 298 |
+
- "8000:8000"
|
| 299 |
+
environment:
|
| 300 |
+
- ENVIRONMENT=production
|
| 301 |
+
env_file:
|
| 302 |
+
- .env
|
| 303 |
+
```
|
| 304 |
+
|
| 305 |
+
#### 構建和運行
|
| 306 |
+
```bash
|
| 307 |
+
docker-compose up --build
|
| 308 |
+
```
|
| 309 |
+
|
| 310 |
+
### 雲平台部署
|
| 311 |
+
|
| 312 |
+
#### Railway
|
| 313 |
+
1. 連接GitHub倉庫
|
| 314 |
+
2. 配置環境變量
|
| 315 |
+
3. 設置啟動命令:`uvicorn app:app --host 0.0.0.0 --port $PORT`
|
| 316 |
+
|
| 317 |
+
#### Render
|
| 318 |
+
1. 連接GitHub倉庫
|
| 319 |
+
2. 設置構建命令:`pip install -r requirements.txt`
|
| 320 |
+
3. 設置啟動命令:`uvicorn app:app --host 0.0.0.0 --port $PORT`
|
| 321 |
+
|
| 322 |
+
#### Heroku
|
| 323 |
+
```bash
|
| 324 |
+
# 創建Procfile
|
| 325 |
+
echo "web: uvicorn app:app --host 0.0.0.0 --port \$PORT" > Procfile
|
| 326 |
+
|
| 327 |
+
# 部署
|
| 328 |
+
git add .
|
| 329 |
+
git commit -m "Deploy to Heroku"
|
| 330 |
+
git push heroku main
|
| 331 |
+
```
|
| 332 |
+
|
| 333 |
+
## 監控與日誌
|
| 334 |
+
|
| 335 |
+
### 日誌配置
|
| 336 |
+
|
| 337 |
+
系統使用Python標準日誌庫,日誌級別可通過環境變量配置:
|
| 338 |
+
|
| 339 |
+
```bash
|
| 340 |
+
LOG_LEVEL=INFO # DEBUG, INFO, WARNING, ERROR
|
| 341 |
+
```
|
| 342 |
+
|
| 343 |
+
### 健康檢查
|
| 344 |
+
|
| 345 |
+
- `GET /health` - 服務健康狀態檢查
|
| 346 |
+
- 返回服務狀態、版本信息和數據庫連接狀態
|
| 347 |
+
|
| 348 |
+
### 監控建議
|
| 349 |
+
|
| 350 |
+
- 使用Prometheus + Grafana監控API性能
|
| 351 |
+
- 設置Supabase數據庫性能監控
|
| 352 |
+
- 配置Stripe支付監控和告警
|
| 353 |
+
|
| 354 |
+
## 安全考量
|
| 355 |
+
|
| 356 |
+
### 身份驗證
|
| 357 |
+
- JWT token過期機制
|
| 358 |
+
- Supabase Auth集成
|
| 359 |
+
- 角色基礎訪問控制
|
| 360 |
+
|
| 361 |
+
### 數據保護
|
| 362 |
+
- 敏感數據加密存儲
|
| 363 |
+
- HTTPS強制使用
|
| 364 |
+
- CORS配置優化
|
| 365 |
+
|
| 366 |
+
### API安全
|
| 367 |
+
- 請求頻率限制
|
| 368 |
+
- 輸入數據驗證
|
| 369 |
+
- SQL注入防護
|
| 370 |
+
|
| 371 |
+
## 故障排除
|
| 372 |
+
|
| 373 |
+
### 常見問題
|
| 374 |
+
|
| 375 |
+
1. **數據庫連接失敗**
|
| 376 |
+
- 檢查Supabase URL和密鑰
|
| 377 |
+
- 確認pgvector擴展已啟用
|
| 378 |
+
|
| 379 |
+
2. **OpenAI API錯誤**
|
| 380 |
+
- 驗證API密鑰有效性
|
| 381 |
+
- 檢查API配額限制
|
| 382 |
+
|
| 383 |
+
3. **Stripe支付問題**
|
| 384 |
+
- 確認Webhook URL配置正確
|
| 385 |
+
- 驗證Stripe密鑰設置
|
| 386 |
+
|
| 387 |
+
4. **Gradio界面無法訪問**
|
| 388 |
+
- 檢查端口配置
|
| 389 |
+
- 確認CORS設置
|
| 390 |
+
|
| 391 |
+
### 調試模式
|
| 392 |
+
|
| 393 |
+
```bash
|
| 394 |
+
# 啟用詳細日誌
|
| 395 |
+
export LOG_LEVEL=DEBUG
|
| 396 |
+
python app.py
|
| 397 |
+
|
| 398 |
+
# 或使用uvicorn
|
| 399 |
+
uvicorn app:app --reload --log-level debug
|
| 400 |
+
```
|
| 401 |
+
|
| 402 |
+
## 貢獻指南
|
| 403 |
+
|
| 404 |
+
### 開發流程
|
| 405 |
+
1. Fork項目
|
| 406 |
+
2. 創建功能分支
|
| 407 |
+
3. 提交變更
|
| 408 |
+
4. 創建Pull Request
|
| 409 |
+
|
| 410 |
+
### 代碼規範
|
| 411 |
+
- 遵循PEP 8 Python代碼風格
|
| 412 |
+
- 添加適當的文檔字符串
|
| 413 |
+
- 編寫單元測試
|
| 414 |
+
|
| 415 |
+
### 測試
|
| 416 |
+
```bash
|
| 417 |
+
# 運行測試
|
| 418 |
+
pytest tests/
|
| 419 |
+
|
| 420 |
+
# 測試覆蓋率
|
| 421 |
+
pytest --cov=app tests/
|
| 422 |
+
```
|
| 423 |
+
|
| 424 |
+
## 許可證
|
| 425 |
+
|
| 426 |
+
本項目採用MIT許可證 - 查看 [LICENSE](LICENSE) 文件了解詳情。
|
| 427 |
+
|
| 428 |
+
## 聯繫方式
|
| 429 |
+
|
| 430 |
+
- 項目作者:銀髮餐桌助手團隊
|
| 431 |
+
- 技術支持:通過GitHub Issues
|
| 432 |
+
- 文檔更新:歡迎提交PR
|
| 433 |
+
|
| 434 |
---
|
| 435 |
|
| 436 |
+
**重要提醒**:本系統僅提供營養建議,無法替代專業醫療諮詢。如有健康問題,請諮詢專業醫師。
|
app.py
ADDED
|
@@ -0,0 +1,649 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""
Main FastAPI application for Silver Table Assistant backend.
Provides REST API endpoints and Gradio interface for AI-powered nutrition consultation.
"""

import os
import logging
from typing import List, Optional, Dict, Any
from uuid import UUID
import asyncio

# Load environment variables from .env file
from dotenv import load_dotenv
load_dotenv()

# FastAPI imports
from fastapi import FastAPI, Request, HTTPException, Depends, status
from fastapi.exceptions import RequestValidationError
from fastapi.middleware.cors import CORSMiddleware
# Fix: JSONResponse was previously imported twice from fastapi.responses;
# the duplicates are merged into a single import line.
from fastapi.responses import JSONResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles

# Database imports
from sqlalchemy.ext.asyncio import AsyncSession

# Import local modules
from database import create_db_and_tables, get_session
from models import Profile, Order, Donation, MenuItem, ChatConversation
from schemas import (
    ProfileCreate, ProfileUpdate, ProfileRead,
    OrderCreate, OrderUpdate, OrderRead,
    DonationCreate, DonationUpdate, DonationRead,
    ChatRequest, ChatResponse,
    MenuItemRead, APIResponse, HealthCheck
)
from dependencies import get_current_user, get_optional_user, require_roles, User
from exceptions import SilverTableException, PaymentException, handle_payment_error
# Fix: crud.get_menu_items was silently shadowed by the menu_data import
# below, making the crud version unreachable. It is aliased so both remain
# accessible; the bare name `get_menu_items` still resolves to menu_data's
# version, exactly as before.
from crud import (
    get_profiles_by_user, create_profile, update_profile, delete_profile,
    create_order, get_orders_by_profile,
    create_donation, update_donation_status,
    get_menu_items as crud_get_menu_items, get_dashboard_stats
)
from menu_data import get_menu_items, get_menu_item_by_id
from config import settings

# Import service modules
from chat_service import chat_stream, get_chat_service
from stripe_service import create_checkout_session_for_order, create_checkout_session_for_donation, handle_webhook

# Gradio imports
import gradio as gr

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
| 60 |
+
# ====== Lifespan Event Handler ======

from contextlib import asynccontextmanager


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Run startup and shutdown logic around the application's lifetime."""
    logger.info("Starting up Silver Table Assistant backend...")

    # Schema creation is best-effort: when the tables already exist (or the
    # database is briefly unreachable) we log a warning and keep serving
    # rather than abort startup.
    try:
        await create_db_and_tables()
    except Exception as e:
        logger.warning(f"Database initialization warning (continuing anyway): {str(e)}")
    else:
        logger.info("Database tables created/verified successfully")

    # The application serves requests while suspended here.
    yield

    logger.info("Shutting down Silver Table Assistant backend...")
|
| 83 |
+
# Initialize FastAPI app with lifespan handler.
# Docs are served under /api/* so the root path stays free for the Gradio UI.
app = FastAPI(
    title="銀髮餐桌助手 API",
    description="專為台灣銀髮族設計的AI營養飲食顧問服務",
    version=settings.api_version,
    docs_url="/api/docs",
    redoc_url="/api/redoc",
    lifespan=lifespan,
)
|
| 94 |
+
# Custom exception handlers for consistent API responses


@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
    """Translate request-validation failures into the API's envelope format."""
    body = {
        "success": False,
        "message": "Request validation error",
        "details": exc.errors(),
    }
    return JSONResponse(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, content=body)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
@app.exception_handler(SilverTableException)
async def silvertable_exception_handler(request: Request, exc: SilverTableException):
    """Map known application errors to a 400 with a friendly message."""
    body = {"success": False, "message": exc.message, "details": exc.details}
    return JSONResponse(status_code=status.HTTP_400_BAD_REQUEST, content=body)
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
@app.exception_handler(PaymentException)
async def payment_exception_handler(request: Request, exc: PaymentException):
    """Map payment-related errors to a 400 with a friendly message."""
    body = {"success": False, "message": exc.message, "details": exc.details}
    return JSONResponse(status_code=status.HTTP_400_BAD_REQUEST, content=body)
|
| 121 |
+
|
| 122 |
+
# CORS middleware configuration.
# Allowed origins come from settings (CORS_ORIGINS env var); methods and
# headers are unrestricted for the configured origins.
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.cors_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
| 130 |
+
|
| 131 |
+
# ====== Health Check Endpoint ======

@app.get("/health", response_model=HealthCheck)
async def health_check():
    """Report service liveness for monitoring.

    NOTE(review): `database` is a hard-coded "connected" string — no live
    connectivity probe is performed here; confirm whether a real check is
    intended.
    """
    return HealthCheck(
        status="healthy",
        timestamp=settings.get_current_timestamp(),
        version=settings.api_version,
        database="connected",
    )
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
# ====== Profile Management Endpoints ======

@app.get("/api/profiles", response_model=List[ProfileRead])
async def get_profiles(
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_session)
):
    """Return every profile owned by the authenticated user."""
    try:
        return await get_profiles_by_user(db, current_user.user_id)
    except Exception as e:
        # Surface a generic 500 so internal error details are not leaked.
        logger.error(f"Error fetching profiles: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to fetch profiles"
        )
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
@app.post("/api/profiles", response_model=ProfileRead, status_code=status.HTTP_201_CREATED)
async def create_user_profile(
    profile_data: ProfileCreate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_session)
):
    """Create or update a profile for the current authenticated user.

    When ``profile_data.id`` names a profile owned by the caller, that profile
    is updated; otherwise (no id, id not owned, or profile since deleted) a
    new profile is created for the caller.

    Raises:
        HTTPException 500: on unexpected persistence errors.
    """
    try:
        profile = None
        if profile_data.id:
            # Security fix (IDOR): only update when the target profile
            # actually belongs to the caller. Previously any authenticated
            # user could overwrite an arbitrary profile by supplying its id.
            # Ids are compared as strings to be robust to UUID/str mixing.
            owned = {str(p.id) for p in await get_profiles_by_user(db, current_user.user_id)}
            if str(profile_data.id) in owned:
                profile = await update_profile(db, profile_data.id, profile_data)

        if profile is None:
            # No id given, id not owned by caller, or update found nothing:
            # fall back to creating a fresh profile (original fallback kept).
            profile = await create_profile(db, profile_data, current_user.user_id)

        return profile
    except HTTPException:
        # Consistency fix: keep deliberate HTTP errors intact instead of
        # converting them into a generic 500 (matches sibling endpoints).
        raise
    except Exception as e:
        logger.error(f"Error creating/updating profile: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to create/update profile"
        )
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
@app.put("/api/profiles/{profile_id}", response_model=ProfileRead)
async def update_user_profile(
    profile_id: str,
    profile_data: ProfileUpdate,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_session)
):
    """
    Update a specific profile by ID.

    Requires authentication; the profile must belong to the caller.

    Raises:
        HTTPException 400: malformed UUID in the path.
        HTTPException 404: profile missing or not owned by the caller.
        HTTPException 500: unexpected persistence errors.
    """
    try:
        # Validate profile_id format
        try:
            profile_uuid = UUID(profile_id)
        except ValueError:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid profile ID format"
            )

        # Security fix (IDOR): previously any authenticated user could update
        # any profile by id. Respond 404 (not 403) for profiles owned by other
        # users so their existence is not leaked.
        owned = {str(p.id) for p in await get_profiles_by_user(db, current_user.user_id)}
        if str(profile_uuid) not in owned:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Profile not found"
            )

        # Update the profile
        profile = await update_profile(db, profile_uuid, profile_data)

        if not profile:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Profile not found"
            )

        return profile
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating profile {profile_id}: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to update profile"
        )
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
@app.delete("/api/profiles/{profile_id}", response_model=APIResponse)
async def delete_user_profile(
    profile_id: str,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_session)
):
    """
    Delete a specific profile by ID.

    Requires authentication; the profile must belong to the caller.

    Raises:
        HTTPException 400: malformed UUID in the path.
        HTTPException 404: profile missing or not owned by the caller.
        HTTPException 500: unexpected persistence errors.
    """
    try:
        # Validate profile_id format
        try:
            profile_uuid = UUID(profile_id)
        except ValueError:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid profile ID format"
            )

        # Security fix (IDOR): previously any authenticated user could delete
        # any profile by id. Respond 404 (not 403) for profiles owned by other
        # users so their existence is not leaked.
        owned = {str(p.id) for p in await get_profiles_by_user(db, current_user.user_id)}
        if str(profile_uuid) not in owned:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Profile not found"
            )

        # Delete the profile
        deleted = await delete_profile(db, profile_uuid)

        if not deleted:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail="Profile not found"
            )

        return APIResponse(
            success=True,
            message="Profile deleted successfully",
            data={"profile_id": profile_id}
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error deleting profile {profile_id}: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Failed to delete profile"
        )
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
# ====== Chat Endpoints ======

class ChatRequestWithProfile(ChatRequest):
    """Chat request payload extended with an optional target profile id."""

    # When omitted, the chat endpoint falls back to the caller's first profile.
    profile_id: Optional[str] = None
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
@app.post("/api/chat")
async def chat_with_assistant(
    request: ChatRequestWithProfile,
    current_user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_session)
):
    """
    Chat with the AI nutrition assistant, streaming the reply as plain text.

    Requires authentication. When ``request.profile_id`` is absent, the
    caller's first profile (if any) is used for personalization.

    Raises:
        HTTPException 500: when the chat service fails unexpectedly.
    """
    try:
        # Resolve the profile used for personalization.
        profile_id = request.profile_id
        if not profile_id:
            profiles = await get_profiles_by_user(db, current_user.user_id)
            if profiles:
                profile_id = str(profiles[0].id)

        # NOTE(review): the "history" forwarded downstream contains only the
        # current message — real multi-turn context is not threaded through
        # yet; confirm whether stored conversation history should be loaded.
        return StreamingResponse(
            chat_stream(
                message=request.message,
                profile_id=profile_id,
                history=[{"role": "user", "content": request.message}]  # Simplified history
            ),
            media_type="text/plain",
            headers={
                "Cache-Control": "no-cache",
                "Connection": "keep-alive",
                "Transfer-Encoding": "chunked"
            }
        )
    except HTTPException:
        # Consistency fix: previously any HTTPException raised inside this
        # handler was converted into a generic 500 (sibling endpoints re-raise).
        raise
    except Exception as e:
        logger.error(f"Error in chat endpoint: {str(e)}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Chat service temporarily unavailable"
        )
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
# ====== Menu Endpoints ======
|
| 324 |
+
|
| 325 |
+
@app.get("/api/menu", response_model=List[Dict[str, Any]])
|
| 326 |
+
async def get_menu():
|
| 327 |
+
"""Get the complete menu of available food items."""
|
| 328 |
+
try:
|
| 329 |
+
menu_items = get_menu_items()
|
| 330 |
+
return menu_items
|
| 331 |
+
except Exception as e:
|
| 332 |
+
logger.error(f"Error fetching menu: {str(e)}")
|
| 333 |
+
raise HTTPException(
|
| 334 |
+
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 335 |
+
detail="Failed to fetch menu"
|
| 336 |
+
)
|
| 337 |
+
|
| 338 |
+
|
| 339 |
+
@app.get("/api/menu/{item_id}", response_model=Dict[str, Any])
|
| 340 |
+
async def get_menu_item(item_id: int):
|
| 341 |
+
"""
|
| 342 |
+
Get a specific menu item by ID.
|
| 343 |
+
|
| 344 |
+
Returns the menu item with all details including nutrition information.
|
| 345 |
+
"""
|
| 346 |
+
try:
|
| 347 |
+
menu_item = get_menu_item_by_id(item_id)
|
| 348 |
+
return menu_item
|
| 349 |
+
except ValueError:
|
| 350 |
+
raise HTTPException(
|
| 351 |
+
status_code=status.HTTP_404_NOT_FOUND,
|
| 352 |
+
detail=f"Menu item with ID {item_id} not found"
|
| 353 |
+
)
|
| 354 |
+
except Exception as e:
|
| 355 |
+
logger.error(f"Error fetching menu item {item_id}: {str(e)}")
|
| 356 |
+
raise HTTPException(
|
| 357 |
+
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 358 |
+
detail="Failed to fetch menu item"
|
| 359 |
+
)
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
# ====== Order Management Endpoints ======
|
| 363 |
+
|
| 364 |
+
@app.post("/api/orders", response_model=OrderRead)
|
| 365 |
+
async def create_order_endpoint(
|
| 366 |
+
order_data: OrderCreate,
|
| 367 |
+
current_user: User = Depends(get_current_user),
|
| 368 |
+
db: AsyncSession = Depends(get_session)
|
| 369 |
+
):
|
| 370 |
+
"""Create a new food order and initiate Stripe checkout."""
|
| 371 |
+
try:
|
| 372 |
+
# Get user's profile
|
| 373 |
+
profiles = await get_profiles_by_user(db, current_user.user_id)
|
| 374 |
+
if not profiles:
|
| 375 |
+
raise HTTPException(
|
| 376 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 377 |
+
detail="User profile required to create order"
|
| 378 |
+
)
|
| 379 |
+
|
| 380 |
+
profile = profiles[0]
|
| 381 |
+
|
| 382 |
+
# Create order in database
|
| 383 |
+
order = await create_order(db, order_data, profile.id)
|
| 384 |
+
|
| 385 |
+
# Create Stripe checkout session
|
| 386 |
+
checkout_url = create_checkout_session_for_order(order)
|
| 387 |
+
|
| 388 |
+
# Update order with Stripe session ID
|
| 389 |
+
from crud import update_order_status
|
| 390 |
+
await update_order_status(db, order.id, "pending", checkout_url.split('/')[-1])
|
| 391 |
+
|
| 392 |
+
return {
|
| 393 |
+
"id": order.id,
|
| 394 |
+
"profile_id": order.profile_id,
|
| 395 |
+
"items": order.items,
|
| 396 |
+
"total_amount": order.total_amount,
|
| 397 |
+
"status": "pending",
|
| 398 |
+
"stripe_session_id": checkout_url.split('/')[-1],
|
| 399 |
+
"created_at": order.created_at,
|
| 400 |
+
"updated_at": order.updated_at,
|
| 401 |
+
"checkout_url": checkout_url
|
| 402 |
+
}
|
| 403 |
+
except Exception as e:
|
| 404 |
+
logger.error(f"Error creating order: {str(e)}")
|
| 405 |
+
raise HTTPException(
|
| 406 |
+
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 407 |
+
detail="Failed to create order"
|
| 408 |
+
)
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
# ====== Donation Endpoints ======
|
| 412 |
+
|
| 413 |
+
@app.post("/api/donations", response_model=DonationRead)
|
| 414 |
+
async def create_donation_endpoint(
|
| 415 |
+
donation_data: DonationCreate,
|
| 416 |
+
current_user: Optional[User] = Depends(get_optional_user),
|
| 417 |
+
db: AsyncSession = Depends(get_session)
|
| 418 |
+
):
|
| 419 |
+
"""
|
| 420 |
+
Create a new donation and initiate Stripe checkout.
|
| 421 |
+
Supports both authenticated and anonymous donations.
|
| 422 |
+
"""
|
| 423 |
+
try:
|
| 424 |
+
# Validate donation minimum amount (donation_data.amount is in cents)
|
| 425 |
+
min_amount_cents = int(settings.MIN_DONATION_AMOUNT * 100)
|
| 426 |
+
if donation_data.amount < min_amount_cents:
|
| 427 |
+
raise PaymentException(f"捐款金額必須至少為 NT${settings.MIN_DONATION_AMOUNT} 元")
|
| 428 |
+
|
| 429 |
+
# Create donation in database
|
| 430 |
+
donation = await create_donation(db, donation_data)
|
| 431 |
+
|
| 432 |
+
# Create Stripe checkout session
|
| 433 |
+
checkout_url = create_checkout_session_for_donation(donation)
|
| 434 |
+
|
| 435 |
+
# Update donation with Stripe session ID
|
| 436 |
+
await update_donation_status(db, donation.id, "pending", checkout_url.split('/')[-1])
|
| 437 |
+
|
| 438 |
+
# Return the donation object with checkout URL added
|
| 439 |
+
return {
|
| 440 |
+
**donation.__dict__,
|
| 441 |
+
"checkout_url": checkout_url
|
| 442 |
+
}
|
| 443 |
+
except Exception as e:
|
| 444 |
+
logger.error(f"Error creating donation: {str(e)}")
|
| 445 |
+
raise HTTPException(
|
| 446 |
+
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 447 |
+
detail="Failed to create donation"
|
| 448 |
+
)
|
| 449 |
+
|
| 450 |
+
|
| 451 |
+
# ====== Stripe Webhook Endpoints ======
|
| 452 |
+
|
| 453 |
+
@app.post("/api/webhook")
|
| 454 |
+
async def stripe_webhook(request: Request):
|
| 455 |
+
"""Handle Stripe webhook events for payment confirmation."""
|
| 456 |
+
try:
|
| 457 |
+
payload = await request.body()
|
| 458 |
+
sig_header = request.headers.get("stripe-signature")
|
| 459 |
+
|
| 460 |
+
if not sig_header:
|
| 461 |
+
raise HTTPException(
|
| 462 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 463 |
+
detail="Missing Stripe signature header"
|
| 464 |
+
)
|
| 465 |
+
|
| 466 |
+
# Handle webhook
|
| 467 |
+
result = handle_webhook(payload, sig_header)
|
| 468 |
+
|
| 469 |
+
return JSONResponse(
|
| 470 |
+
status_code=200,
|
| 471 |
+
content={"status": "success", "result": result}
|
| 472 |
+
)
|
| 473 |
+
except Exception as e:
|
| 474 |
+
logger.error(f"Webhook error: {str(e)}")
|
| 475 |
+
raise HTTPException(
|
| 476 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 477 |
+
detail="Webhook processing failed"
|
| 478 |
+
)
|
| 479 |
+
|
| 480 |
+
|
| 481 |
+
@app.post("/api/stripe/webhook")
|
| 482 |
+
async def stripe_webhook_endpoint(request: Request):
|
| 483 |
+
"""Handle Stripe webhook events for payment confirmation.
|
| 484 |
+
|
| 485 |
+
This endpoint specifically handles Stripe webhook events with proper signature verification
|
| 486 |
+
and supports events like checkout.session.completed, payment_intent.succeeded, etc.
|
| 487 |
+
|
| 488 |
+
Expected webhook signing secret: STRIPE_WEBHOOK_SECRET environment variable
|
| 489 |
+
"""
|
| 490 |
+
try:
|
| 491 |
+
# Get the raw payload and signature header
|
| 492 |
+
payload = await request.body()
|
| 493 |
+
sig_header = request.headers.get("stripe-signature")
|
| 494 |
+
|
| 495 |
+
if not sig_header:
|
| 496 |
+
logger.error("Missing Stripe signature header in webhook request")
|
| 497 |
+
raise HTTPException(
|
| 498 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 499 |
+
detail="Missing Stripe signature header"
|
| 500 |
+
)
|
| 501 |
+
|
| 502 |
+
logger.info(f"Processing Stripe webhook with signature: {sig_header[:20]}...")
|
| 503 |
+
|
| 504 |
+
# Use the existing webhook handler from stripe_service
|
| 505 |
+
result = handle_webhook(payload, sig_header)
|
| 506 |
+
|
| 507 |
+
logger.info(f"Webhook processed successfully: {result.get('status', 'unknown')}")
|
| 508 |
+
|
| 509 |
+
return JSONResponse(
|
| 510 |
+
status_code=200,
|
| 511 |
+
content={
|
| 512 |
+
"status": "success",
|
| 513 |
+
"message": "Webhook processed successfully",
|
| 514 |
+
"result": result
|
| 515 |
+
}
|
| 516 |
+
)
|
| 517 |
+
|
| 518 |
+
except Exception as e:
|
| 519 |
+
logger.error(f"Stripe webhook error: {str(e)}")
|
| 520 |
+
|
| 521 |
+
# Return appropriate error response based on exception type
|
| 522 |
+
if "Invalid webhook signature" in str(e):
|
| 523 |
+
raise HTTPException(
|
| 524 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 525 |
+
detail="Invalid webhook signature"
|
| 526 |
+
)
|
| 527 |
+
else:
|
| 528 |
+
raise HTTPException(
|
| 529 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 530 |
+
detail=f"Webhook processing failed: {str(e)}"
|
| 531 |
+
)
|
| 532 |
+
|
| 533 |
+
|
| 534 |
+
# ====== Dashboard Endpoints ======
|
| 535 |
+
|
| 536 |
+
@app.get("/api/dashboard/{profile_id}")
|
| 537 |
+
async def get_user_dashboard(
|
| 538 |
+
profile_id: str,
|
| 539 |
+
current_user: User = Depends(require_roles(["family", "admin"])),
|
| 540 |
+
db: AsyncSession = Depends(get_session)
|
| 541 |
+
):
|
| 542 |
+
"""
|
| 543 |
+
Get nutrition dashboard data for a specific profile.
|
| 544 |
+
Requires family role or admin permissions.
|
| 545 |
+
"""
|
| 546 |
+
try:
|
| 547 |
+
# Validate profile_id format
|
| 548 |
+
try:
|
| 549 |
+
profile_uuid = UUID(profile_id)
|
| 550 |
+
except ValueError:
|
| 551 |
+
raise HTTPException(
|
| 552 |
+
status_code=status.HTTP_400_BAD_REQUEST,
|
| 553 |
+
detail="Invalid profile ID format"
|
| 554 |
+
)
|
| 555 |
+
|
| 556 |
+
# Get dashboard statistics
|
| 557 |
+
stats = await get_dashboard_stats(db, profile_uuid)
|
| 558 |
+
|
| 559 |
+
return stats
|
| 560 |
+
except Exception as e:
|
| 561 |
+
logger.error(f"Error fetching dashboard data: {str(e)}")
|
| 562 |
+
raise HTTPException(
|
| 563 |
+
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
| 564 |
+
detail="Failed to fetch dashboard data"
|
| 565 |
+
)
|
| 566 |
+
|
| 567 |
+
|
| 568 |
+
# ====== Gradio Chat Interface ======
|
| 569 |
+
|
| 570 |
+
async def gradio_chat(message: str, history: List[Dict[str, str]]):
    """
    Adapt chat_service.chat_stream to Gradio's expected format.

    Yields the cumulative response text after each streamed chunk, which
    is the contract Gradio's ChatInterface expects from a streaming
    generator function.
    """
    try:
        chat_service = get_chat_service()
        response = ""
        # In Gradio 6.x, history is already a list of dictionaries with 'role' and 'content'
        async for chunk in chat_service.chat_stream(message, history=history):
            response += chunk
            yield response
    except Exception as e:
        logger.error(f"Error in Gradio chat: {str(e)}")
        # Fallback apology (Traditional Chinese) shown to the user on failure.
        yield "抱歉,系統暫時無法回應。請稍後再試。"
|
| 584 |
+
|
| 585 |
+
|
| 586 |
+
# Create Gradio interface
|
| 587 |
+
with gr.Blocks(title="銀髮餐桌助手", theme=gr.themes.Soft()) as gradio_demo:
|
| 588 |
+
gr.Markdown(
|
| 589 |
+
"""
|
| 590 |
+
# 銀髮餐桌助手 🥄
|
| 591 |
+
|
| 592 |
+
專為台灣銀髮族設計的AI營養飲食顧問
|
| 593 |
+
|
| 594 |
+
**功能特色:**
|
| 595 |
+
- 個人化營養建議
|
| 596 |
+
- 健康飲食指導
|
| 597 |
+
- 在地食材推薦
|
| 598 |
+
- 專業營養諮詢
|
| 599 |
+
|
| 600 |
+
**使用說明:**
|
| 601 |
+
無需登入即可開始對話,系統會根據您的健康狀況提供個人化建議。
|
| 602 |
+
"""
|
| 603 |
+
)
|
| 604 |
+
|
| 605 |
+
chatbot = gr.ChatInterface(
|
| 606 |
+
fn=gradio_chat,
|
| 607 |
+
type="messages",
|
| 608 |
+
title="營養飲食諮詢",
|
| 609 |
+
description="請輸入您的問題,例如:",
|
| 610 |
+
examples=[
|
| 611 |
+
"請問銀髮族應該如何補充蛋白質?",
|
| 612 |
+
"我爸爸有糖尿病,飲食上有什麼需要注意的?",
|
| 613 |
+
"推薦一些適合銀髮族的早餐選項",
|
| 614 |
+
"什麼食物對骨骼健康有好處?",
|
| 615 |
+
"如何製作軟嫩的料理?"
|
| 616 |
+
]
|
| 617 |
+
)
|
| 618 |
+
|
| 619 |
+
gr.Markdown(
|
| 620 |
+
"""
|
| 621 |
+
---
|
| 622 |
+
|
| 623 |
+
**重要提醒:**
|
| 624 |
+
- 本系統僅提供營養建議,無法替代專業醫療諮詢
|
| 625 |
+
- 如有健康問題,請諮詢專業醫師
|
| 626 |
+
- 建議遵循台灣衛福部的營養指導原則
|
| 627 |
+
"""
|
| 628 |
+
)
|
| 629 |
+
|
| 630 |
+
|
| 631 |
+
# Mount Gradio app to FastAPI
|
| 632 |
+
app = gr.mount_gradio_app(app, gradio_demo, path="/")
|
| 633 |
+
|
| 634 |
+
|
| 635 |
+
# ====== Main Entry Point ======
|
| 636 |
+
|
| 637 |
+
if __name__ == "__main__":
|
| 638 |
+
import uvicorn
|
| 639 |
+
|
| 640 |
+
logger.info(f"Starting Silver Table Assistant backend on {settings.host}:{settings.port}")
|
| 641 |
+
|
| 642 |
+
# Run the application
|
| 643 |
+
uvicorn.run(
|
| 644 |
+
"app:app",
|
| 645 |
+
host=settings.host,
|
| 646 |
+
port=settings.port,
|
| 647 |
+
reload=settings.is_development(),
|
| 648 |
+
log_level="info"
|
| 649 |
+
)
|
cache.py
ADDED
|
@@ -0,0 +1,249 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Caching module for Silver Table Assistant.
|
| 3 |
+
Provides in-memory caching for performance optimization.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import time
|
| 7 |
+
import hashlib
|
| 8 |
+
import json
|
| 9 |
+
from typing import Any, Optional, Dict, Callable
|
| 10 |
+
from functools import wraps
|
| 11 |
+
import asyncio
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class Cache:
    """Simple in-memory key/value cache with per-entry TTL support."""

    def __init__(self, default_ttl: int = 3600):  # 1 hour default TTL
        """Create an empty cache whose entries expire after default_ttl seconds."""
        self._cache: Dict[str, Dict[str, Any]] = {}
        self.default_ttl = default_ttl

    def _generate_key(self, prefix: str, *args, **kwargs) -> str:
        """Build a deterministic cache key from a prefix and call arguments."""
        serialized = json.dumps({"args": args, "kwargs": kwargs}, sort_keys=True, default=str)
        digest = hashlib.md5(serialized.encode()).hexdigest()
        return f"{prefix}:{digest}"

    def get(self, key: str) -> Optional[Any]:
        """Return the cached value, or None when absent or expired.

        Expired entries are evicted lazily on access.
        """
        entry = self._cache.get(key)
        if entry is None:
            return None
        if time.time() < entry["expires_at"]:
            return entry["value"]
        # Stale — drop it so it no longer counts toward stats.
        del self._cache[key]
        return None

    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
        """Store value under key, expiring after ttl (or the default) seconds."""
        lifetime = ttl or self.default_ttl
        self._cache[key] = {"value": value, "expires_at": time.time() + lifetime}

    def delete(self, key: str) -> None:
        """Remove key from the cache; a missing key is a no-op."""
        self._cache.pop(key, None)

    def clear(self) -> None:
        """Drop every entry."""
        self._cache.clear()

    def cleanup_expired(self) -> int:
        """Evict all expired entries and return how many were removed."""
        now = time.time()
        stale_keys = [k for k, entry in self._cache.items() if now >= entry["expires_at"]]
        for k in stale_keys:
            del self._cache[k]
        return len(stale_keys)

    def get_stats(self) -> Dict[str, Any]:
        """Return total/active/expired entry counts and a rough hit-potential label."""
        now = time.time()
        total = len(self._cache)
        expired = sum(1 for entry in self._cache.values() if now >= entry["expires_at"])
        if total > 100:
            potential = "high"
        elif total > 10:
            potential = "medium"
        else:
            potential = "low"
        return {
            "total_entries": total,
            "active_entries": total - expired,
            "expired_entries": expired,
            "cache_hit_potential": potential,
        }
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
# Global cache instances; TTLs are tuned per data volatility.
document_cache = Cache(default_ttl=1800)  # 30 minutes for document queries
nutrition_cache = Cache(default_ttl=3600)  # 1 hour for nutrition calculations
user_context_cache = Cache(default_ttl=900)  # 15 minutes for user context
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def cache_result(cache_instance: Cache, prefix: str, ttl: Optional[int] = None):
    """Decorator that memoizes a function's results in the given Cache.

    Works for both sync and async functions. Note: results equal to None
    are never cached, because a None cache hit is indistinguishable from a
    miss in Cache.get().

    Args:
        cache_instance: Cache in which to store results.
        prefix: Key prefix namespacing this function's entries.
        ttl: Optional per-entry TTL override in seconds.

    Returns:
        The decorator wrapping the target function.
    """

    def decorator(func: Callable):
        is_async = asyncio.iscoroutinefunction(func)

        @wraps(func)
        async def async_wrapper(*args, **kwargs):
            cache_key = cache_instance._generate_key(prefix, *args, **kwargs)

            # Return the cached value when present and unexpired.
            cached_result = cache_instance.get(cache_key)
            if cached_result is not None:
                return cached_result

            # Bug fix: func is known to be a coroutine function on this
            # path, so the original's re-check of iscoroutinefunction
            # (with a sync-call else branch) was dead code.
            result = await func(*args, **kwargs)
            cache_instance.set(cache_key, result, ttl)
            return result

        @wraps(func)
        def sync_wrapper(*args, **kwargs):
            cache_key = cache_instance._generate_key(prefix, *args, **kwargs)

            cached_result = cache_instance.get(cache_key)
            if cached_result is not None:
                return cached_result

            result = func(*args, **kwargs)
            cache_instance.set(cache_key, result, ttl)
            return result

        # Choose the wrapper matching the decorated function's calling style.
        return async_wrapper if is_async else sync_wrapper

    return decorator
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
class NutritionCache:
    """Namespaced helpers for caching nutrition data in nutrition_cache."""

    @staticmethod
    def get_menu_item_nutrition(menu_item_id: int) -> Optional[Dict[str, Any]]:
        """Return cached nutrition data for a menu item, or None on miss."""
        return nutrition_cache.get(f"nutrition:menu_item:{menu_item_id}")

    @staticmethod
    def set_menu_item_nutrition(menu_item_id: int, nutrition_data: Dict[str, Any], ttl: Optional[int] = None) -> None:
        """Store nutrition data for a menu item."""
        nutrition_cache.set(f"nutrition:menu_item:{menu_item_id}", nutrition_data, ttl)

    @staticmethod
    def get_user_nutrition_summary(user_id: str, days: int = 7) -> Optional[Dict[str, Any]]:
        """Return a cached nutrition summary for a user/day-window pair."""
        return nutrition_cache.get(f"nutrition:summary:{user_id}:{days}")

    @staticmethod
    def set_user_nutrition_summary(user_id: str, days: int, summary_data: Dict[str, Any], ttl: Optional[int] = None) -> None:
        """Store a nutrition summary for a user/day-window pair."""
        nutrition_cache.set(f"nutrition:summary:{user_id}:{days}", summary_data, ttl)

    @staticmethod
    def invalidate_user_nutrition(user_id: str) -> None:
        """Drop every cached nutrition summary belonging to this user."""
        prefix = f"nutrition:summary:{user_id}"
        stale = [k for k in nutrition_cache._cache.keys() if k.startswith(prefix)]
        for key in stale:
            nutrition_cache.delete(key)
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
class DocumentCache:
    """Namespaced helpers for caching RAG document-search results."""

    @staticmethod
    def _make_key(query: str, k: int, score_threshold: Optional[float]) -> str:
        """Build the cache key for a (query, k, score_threshold) triple."""
        # Bug fix: the original used `if score_threshold`, so a threshold of
        # 0.0 (falsy) produced the same key as "no threshold", letting the
        # two result sets overwrite each other. `is not None` keeps them apart.
        threshold_key = f":{score_threshold}" if score_threshold is not None else ""
        return f"documents:query:{hashlib.md5(query.encode()).hexdigest()}:k{k}{threshold_key}"

    @staticmethod
    def get_relevant_documents(query: str, k: int, score_threshold: Optional[float] = None) -> Optional[list]:
        """Return cached document search results for this query, or None on miss."""
        return document_cache.get(DocumentCache._make_key(query, k, score_threshold))

    @staticmethod
    def set_relevant_documents(query: str, k: int, documents: list, score_threshold: Optional[float] = None, ttl: Optional[int] = None) -> None:
        """Cache document search results for this query."""
        document_cache.set(DocumentCache._make_key(query, k, score_threshold), documents, ttl)

    @staticmethod
    def invalidate_document_cache() -> None:
        """Invalidate all document cache entries."""
        document_cache.clear()
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
class UserContextCache:
    """Namespaced helpers for caching per-user context data."""

    @staticmethod
    def get_user_context(user_id: str) -> Optional[Dict[str, Any]]:
        """Return the cached context for a user, or None on miss."""
        return user_context_cache.get(f"context:user:{user_id}")

    @staticmethod
    def set_user_context(user_id: str, context_data: Dict[str, Any], ttl: Optional[int] = None) -> None:
        """Store context data for a user."""
        user_context_cache.set(f"context:user:{user_id}", context_data, ttl)

    @staticmethod
    def invalidate_user_context(user_id: str) -> None:
        """Drop the cached context for a user."""
        user_context_cache.delete(f"context:user:{user_id}")
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
# Cache management utilities
|
| 228 |
+
def get_cache_stats() -> Dict[str, Any]:
    """Collect statistics from every global cache instance."""
    named_caches = {
        "document_cache": document_cache,
        "nutrition_cache": nutrition_cache,
        "user_context_cache": user_context_cache,
    }
    return {name: cache.get_stats() for name, cache in named_caches.items()}
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
def cleanup_all_caches() -> Dict[str, int]:
    """Evict expired entries from every global cache; return removal counts."""
    named_caches = (
        ("document_cache", document_cache),
        ("nutrition_cache", nutrition_cache),
        ("user_context_cache", user_context_cache),
    )
    return {name: cache.cleanup_expired() for name, cache in named_caches}
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def invalidate_user_cache(user_id: str) -> None:
    """Invalidate all cache entries for a specific user.

    Clears both the user's nutrition summaries and cached context so the
    next request recomputes them from fresh data.
    """
    NutritionCache.invalidate_user_nutrition(user_id)
    UserContextCache.invalidate_user_context(user_id)
|
chat_service.py
ADDED
|
@@ -0,0 +1,445 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Chat service for Silver Table Assistant.
|
| 3 |
+
Provides AI-powered chat functionality with RAG integration and user context.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import logging
|
| 8 |
+
from typing import List, Dict, Any, Optional, AsyncGenerator
|
| 9 |
+
from uuid import UUID
|
| 10 |
+
|
| 11 |
+
from langchain_openai import ChatOpenAI
|
| 12 |
+
from langchain_core.messages import HumanMessage, SystemMessage, AIMessage
|
| 13 |
+
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
|
| 14 |
+
|
| 15 |
+
import asyncio
|
| 16 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 17 |
+
|
| 18 |
+
from rag import get_rag_service
|
| 19 |
+
from crud import get_profile
|
| 20 |
+
from models import ChatConversation, Profile
|
| 21 |
+
from database import get_db_session
|
| 22 |
+
from config import settings
|
| 23 |
+
|
| 24 |
+
# Configure logging
|
| 25 |
+
logging.basicConfig(level=logging.INFO)
|
| 26 |
+
logger = logging.getLogger(__name__)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class ChatService:
|
| 30 |
+
"""Chat service for AI-powered conversations with RAG integration."""
|
| 31 |
+
|
| 32 |
+
def __init__(self):
|
| 33 |
+
"""Initialize chat service with LiteLLM or OpenAI Chat model."""
|
| 34 |
+
# Environment variables
|
| 35 |
+
self.openai_api_key = os.getenv("OPENAI_API_KEY") or os.getenv("LITELLM_API_KEY", "sk-eT_04m428oAPUD5kUmIhVA")
|
| 36 |
+
self.openai_base_url = os.getenv("OPENAI_BASE_URL") or os.getenv("LITELLM_BASE_URL", "https://litellm-ekkks8gsocw.dgx-coolify.apmic.ai/")
|
| 37 |
+
|
| 38 |
+
if not self.openai_api_key:
|
| 39 |
+
raise ValueError("Missing required environment variable: OPENAI_API_KEY or LITELLM_API_KEY")
|
| 40 |
+
|
| 41 |
+
# Initialize ChatOpenAI model (works with LiteLLM compatible endpoints)
|
| 42 |
+
model_kwargs = {
|
| 43 |
+
"model": settings.ai_model_name,
|
| 44 |
+
"openai_api_key": self.openai_api_key,
|
| 45 |
+
"temperature": settings.ai_temperature,
|
| 46 |
+
"max_tokens": settings.ai_max_tokens,
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
# Add base_url for LiteLLM or Azure OpenAI
|
| 50 |
+
if self.openai_base_url:
|
| 51 |
+
model_kwargs["openai_api_base"] = self.openai_base_url
|
| 52 |
+
|
| 53 |
+
self.llm = ChatOpenAI(**model_kwargs)
|
| 54 |
+
logger.info(f"Initialized ChatOpenAI with base_url: {self.openai_base_url}, model: {settings.ai_model_name}")
|
| 55 |
+
|
| 56 |
+
# System prompt for the silver table assistant
|
| 57 |
+
self.system_prompt = self._create_system_prompt()
|
| 58 |
+
|
| 59 |
+
# RAG service
|
| 60 |
+
self.rag_service = get_rag_service()
|
| 61 |
+
|
| 62 |
+
    def _create_system_prompt(self) -> str:
        """
        Create the system prompt for the silver table assistant.

        The prompt is written in Traditional Chinese and fixes the
        assistant's persona, scope (nutrition advice only, no medical
        diagnosis), and response style.

        Returns:
            System prompt string
        """
        # NOTE: this is a runtime string sent to the model — it must stay in
        # Traditional Chinese and must not be reformatted.
        return """你是「銀髮餐桌助手」,專為台灣銀髮族設計的AI營養飲食顧問助手。

角色定位:
- 你是一位溫暖、耐心、專業的營養飲食顧問
- 專門為台灣銀髮族(65歲以上)提供飲食建議
- 熟悉台灣在地食材、飲食文化和生活習慣

核心原則:
1. 嚴格遵循台灣衛福部(MOHW)的營養指導原則和飲食指南
2. 僅提供營養建議,絕不進行醫療診斷或疾病診斷
3. 針對銀髮族的特殊營養需求(蛋白質、鈣質、維生素D、纖維等)
4. 考慮台灣在地飲食文化和可用食材
5. 語調溫和、耐心,像家中長輩般的關懷

重要限制:
- 絕不提供醫療診斷或疾病治療建議
- 涉及健康問題時,建議諮詢專業醫師
- 不推薦特定品牌或產品
- 基於科學證據和官方營養指南提供建議

回應風格:
- 使用繁體中文
- 語調溫暖親切
- 提供具體可行的建議
- 適時提供鼓勵和關懷
- 考慮使用者的年齡和健康狀況

當使用者詢問營養、飲食、食材選擇、烹調方式等相關問題時,請基於台灣衛福部的營養指導原則回答,並考慮使用者的個人健康狀況(如果有提供的話)。"""
|
| 97 |
+
|
| 98 |
+
    async def get_user_context(self, profile_id: Optional[UUID] = None) -> Dict[str, Any]:
        """
        Get user context information for personalized responses.

        Args:
            profile_id: User profile ID; when None, the default (no-profile)
                context is returned without any database access.

        Returns:
            Dictionary with keys has_profile, age, health_conditions and
            dietary_restrictions (plus display_name when a profile is
            found). Lookup failures are logged and the default context is
            returned rather than raising.
        """
        # Default context used when no profile is supplied or found.
        context = {
            "has_profile": False,
            "age": None,
            "health_conditions": None,
            "dietary_restrictions": None
        }

        if profile_id:
            try:
                # Open a short-lived database session just for this lookup.
                async with get_db_session() as db:
                    profile = await get_profile(db, profile_id)
                    if profile:
                        context.update({
                            "has_profile": True,
                            "age": profile.age,
                            "health_conditions": profile.health_condition,
                            "dietary_restrictions": profile.dietary_restrictions,
                            "display_name": profile.display_name
                        })
                        logger.info(f"Retrieved profile context for user {profile_id}")
                    else:
                        logger.warning(f"Profile not found for ID: {profile_id}")
            except Exception as e:
                # Best effort: personalization is optional, so fall back to
                # the default context instead of failing the chat.
                logger.error(f"Error retrieving user profile: {str(e)}")

        return context
|
| 135 |
+
|
| 136 |
+
def format_context_information(self, user_context: Dict[str, Any], relevant_docs: List[Any]) -> str:
|
| 137 |
+
"""
|
| 138 |
+
Format context information for the AI prompt.
|
| 139 |
+
|
| 140 |
+
Args:
|
| 141 |
+
user_context: User context dictionary
|
| 142 |
+
relevant_docs: Relevant documents from RAG
|
| 143 |
+
|
| 144 |
+
Returns:
|
| 145 |
+
Formatted context string
|
| 146 |
+
"""
|
| 147 |
+
context_parts = []
|
| 148 |
+
|
| 149 |
+
# Add user context if available
|
| 150 |
+
if user_context["has_profile"]:
|
| 151 |
+
context_parts.append("使用者背景資訊:")
|
| 152 |
+
if user_context.get("display_name"):
|
| 153 |
+
context_parts.append(f"- 姓名:{user_context['display_name']}")
|
| 154 |
+
if user_context.get("age"):
|
| 155 |
+
context_parts.append(f"- 年齡:{user_context['age']}歲")
|
| 156 |
+
if user_context.get("health_conditions"):
|
| 157 |
+
context_parts.append(f"- 健康狀況:{user_context['health_conditions']}")
|
| 158 |
+
if user_context.get("dietary_restrictions"):
|
| 159 |
+
context_parts.append(f"- 飲食限制:{user_context['dietary_restrictions']}")
|
| 160 |
+
context_parts.append("")
|
| 161 |
+
|
| 162 |
+
# Add relevant documents
|
| 163 |
+
if relevant_docs:
|
| 164 |
+
context_parts.append("相關營養指南資訊:")
|
| 165 |
+
for i, doc in enumerate(relevant_docs, 1):
|
| 166 |
+
source = doc.metadata.get("file_name", "未知來源")
|
| 167 |
+
content = doc.page_content.strip()
|
| 168 |
+
# Limit content length to avoid token overflow
|
| 169 |
+
if len(content) > 500:
|
| 170 |
+
content = content[:500] + "..."
|
| 171 |
+
context_parts.append(f"{i}. 來源:{source}")
|
| 172 |
+
context_parts.append(f" 內容:{content}")
|
| 173 |
+
context_parts.append("")
|
| 174 |
+
|
| 175 |
+
return "\n".join(context_parts)
|
| 176 |
+
|
| 177 |
+
    async def chat_stream(
        self,
        message: str,
        profile_id: Optional[str] = None,
        history: Optional[List[Dict[str, str]]] = None
    ) -> AsyncGenerator[str, None]:
        """
        Stream the assistant's reply, enriched with profile and RAG context.

        Pipeline: resolve the profile, fetch relevant guideline documents,
        build the message list (system prompt + history + user message),
        stream the LLM reply chunk-by-chunk, then persist the full exchange.

        Args:
            message: User message.
            profile_id: Optional user profile ID (string UUID); an invalid
                format is logged and treated as anonymous, not an error.
            history: Prior turns as ``{"role": ..., "content": ...}`` dicts;
                roles other than "user"/"assistant" are silently skipped.

        Yields:
            Response content chunks as they arrive from the LLM. On any
            failure a single fallback apology string is yielded instead.
        """
        try:
            # Convert profile_id to UUID if provided; bad input degrades to
            # an anonymous session rather than failing the request.
            profile_uuid = None
            if profile_id:
                try:
                    profile_uuid = UUID(profile_id)
                except ValueError:
                    logger.warning(f"Invalid profile ID format: {profile_id}")

            # Get user context (best-effort personalization)
            user_context = await self.get_user_context(profile_uuid)

            # Get relevant documents from RAG (top-6 retrieval)
            relevant_docs = await self.rag_service.get_relevant_documents(message, k=6)

            # Format context information for injection into the system prompt
            context_info = self.format_context_information(user_context, relevant_docs)

            # Prepare message history
            messages = []

            # Add system message, appending context only when there is any
            if context_info:
                system_content = f"{self.system_prompt}\n\n背景資訊:\n{context_info}"
            else:
                system_content = self.system_prompt

            messages.append(SystemMessage(content=system_content))

            # Add chat history
            if history:
                for msg in history:
                    if msg["role"] == "user":
                        messages.append(HumanMessage(content=msg["content"]))
                    elif msg["role"] == "assistant":
                        messages.append(AIMessage(content=msg["content"]))

            # Add current user message
            messages.append(HumanMessage(content=message))

            # Stream response from LLM; accumulate the full text for logging
            logger.info(f"Generating chat response for message: '{message[:50]}...'")

            full_response = ""
            async for chunk in self.llm.astream(messages):
                if hasattr(chunk, "content") and chunk.content:
                    full_response += chunk.content
                    yield chunk.content

            # Log the interaction only when the model produced some output
            if full_response:
                await self._log_conversation(
                    message=message,
                    response=full_response,
                    profile_id=profile_uuid,
                    user_context=user_context,
                    relevant_docs_count=len(relevant_docs)
                )

        except Exception as e:
            # Single catch-all boundary: the caller always receives a reply.
            logger.error(f"Error in chat stream: {str(e)}")
            yield "抱歉,系統發生了一些問題。請稍後再試。"
|
| 256 |
+
|
| 257 |
+
async def _log_conversation(
|
| 258 |
+
self,
|
| 259 |
+
message: str,
|
| 260 |
+
response: str,
|
| 261 |
+
profile_id: Optional[UUID],
|
| 262 |
+
user_context: Dict[str, Any],
|
| 263 |
+
relevant_docs_count: int
|
| 264 |
+
) -> None:
|
| 265 |
+
"""
|
| 266 |
+
Log conversation to database for analytics and improvement.
|
| 267 |
+
|
| 268 |
+
Args:
|
| 269 |
+
message: User message
|
| 270 |
+
response: AI response
|
| 271 |
+
profile_id: User profile ID
|
| 272 |
+
user_context: User context information
|
| 273 |
+
relevant_docs_count: Number of relevant documents found
|
| 274 |
+
"""
|
| 275 |
+
try:
|
| 276 |
+
# Prepare metadata
|
| 277 |
+
metadata = {
|
| 278 |
+
"user_context": user_context,
|
| 279 |
+
"relevant_docs_count": relevant_docs_count,
|
| 280 |
+
"timestamp": settings.get_current_timestamp()
|
| 281 |
+
}
|
| 282 |
+
|
| 283 |
+
# Log conversation with both message and response
|
| 284 |
+
await self._save_conversation(
|
| 285 |
+
profile_id=profile_id,
|
| 286 |
+
message=message,
|
| 287 |
+
response=response,
|
| 288 |
+
meta_data=metadata
|
| 289 |
+
)
|
| 290 |
+
|
| 291 |
+
except Exception as e:
|
| 292 |
+
logger.error(f"Error logging conversation: {str(e)}")
|
| 293 |
+
|
| 294 |
+
async def _save_conversation(
|
| 295 |
+
self,
|
| 296 |
+
profile_id: Optional[UUID],
|
| 297 |
+
message: str,
|
| 298 |
+
response: Optional[str] = None,
|
| 299 |
+
meta_data: Optional[Dict[str, Any]] = None
|
| 300 |
+
) -> None:
|
| 301 |
+
"""
|
| 302 |
+
Save chat conversation to database.
|
| 303 |
+
|
| 304 |
+
Args:
|
| 305 |
+
profile_id: User profile ID
|
| 306 |
+
message: User message
|
| 307 |
+
response: AI response (optional)
|
| 308 |
+
meta_data: Additional metadata
|
| 309 |
+
"""
|
| 310 |
+
try:
|
| 311 |
+
async with get_db_session() as db:
|
| 312 |
+
conversation = ChatConversation(
|
| 313 |
+
profile_id=profile_id,
|
| 314 |
+
message=message,
|
| 315 |
+
response=response,
|
| 316 |
+
meta_data=meta_data or {}
|
| 317 |
+
)
|
| 318 |
+
|
| 319 |
+
db.add(conversation)
|
| 320 |
+
await db.commit()
|
| 321 |
+
|
| 322 |
+
except Exception as e:
|
| 323 |
+
logger.error(f"Error saving conversation to database: {str(e)}")
|
| 324 |
+
|
| 325 |
+
    async def get_chat_history(
        self,
        profile_id: Optional[UUID],
        session_id: str,
        limit: int = 50
    ) -> List[Dict[str, str]]:
        """
        Get chat history, oldest first, as role/content/timestamp dicts.

        NOTE(review): ``session_id`` is accepted but never used — rows are
        not filtered by session (the model may lack a session column;
        confirm before wiring it in). Also note that when ``profile_id`` is
        None the query returns conversations across ALL profiles, and that
        ``limit`` caps conversation rows, so up to ``2 * limit`` messages
        may be returned (each row expands to a user + assistant entry).

        Args:
            profile_id: User profile ID; None disables profile filtering.
            session_id: Chat session ID (currently ignored, see note above).
            limit: Maximum number of conversation rows to load.

        Returns:
            List of ``{"role", "content", "timestamp"}`` dicts; empty list
            on any database error.
        """
        try:
            async with get_db_session() as db:
                from sqlalchemy import select

                query = (
                    select(ChatConversation)
                    .order_by(ChatConversation.created_at.asc())
                    .limit(limit)
                )

                if profile_id:
                    query = query.where(ChatConversation.profile_id == profile_id)

                result = await db.execute(query)
                conversations = result.scalars().all()

                # Expand each stored row into a user message and, when
                # present, the assistant reply. Both entries reuse the row's
                # created_at timestamp.
                chat_history = []
                for conv in conversations:
                    if conv.message:
                        chat_history.append({
                            "role": "user",
                            "content": conv.message,
                            "timestamp": conv.created_at.isoformat()
                        })
                    if conv.response:
                        chat_history.append({
                            "role": "assistant",
                            "content": conv.response,
                            "timestamp": conv.created_at.isoformat()
                        })

                return chat_history

        except Exception as e:
            logger.error(f"Error getting chat history: {str(e)}")
            return []
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
# Global chat service instance (lazily created by get_chat_service)
chat_service: Optional[ChatService] = None


def get_chat_service() -> ChatService:
    """
    Get or create the global chat service instance.

    Lazily constructs a single ChatService on first call and reuses it for
    the lifetime of the process. Not guarded by a lock: fine on a single
    event loop, but concurrent first callers could each construct one.

    Returns:
        ChatService instance
    """
    global chat_service
    if chat_service is None:
        chat_service = ChatService()
    return chat_service
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
# Convenience function for backward compatibility with pre-class callers.
async def chat_stream(
    message: str,
    profile_id: Optional[str] = None,
    history: List[Dict[str, str]] = None
) -> AsyncGenerator[str, None]:
    """Module-level wrapper that streams a reply via the shared service."""
    async for piece in get_chat_service().chat_stream(message, profile_id, history):
        yield piece
|
| 408 |
+
|
| 409 |
+
|
| 410 |
+
if __name__ == "__main__":
    # Manual smoke test: stream answers for a few sample senior-nutrition
    # questions and print them to the console.
    async def _run_smoke_test():
        """Drive the chat service end-to-end from the command line."""
        print("Testing Chat Service...")

        try:
            service = get_chat_service()

            sample_questions = [
                "請問銀髮族應該如何補充蛋白質?",
                "我爸爸有糖尿病,飲食上有什麼需要注意的?",
                "推薦一些適合銀髮族的早餐選項"
            ]

            for idx, question in enumerate(sample_questions, 1):
                print(f"\n--- 測試對話 {idx} ---")
                print(f"使用者:{question}")
                print("助手:", end="", flush=True)

                async for piece in service.chat_stream(question):
                    print(piece, end="", flush=True)

                print("\n" + "="*50)

        except Exception as exc:
            print(f"Error: {str(exc)}")
            raise

    asyncio.run(_run_smoke_test())
|
config.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Configuration management for Silver Table Assistant.
|
| 3 |
+
Centralized settings with environment variable support.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from datetime import datetime
|
| 8 |
+
from typing import List
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class Settings:
    """Application settings loaded from environment variables.

    Instantiating this class reads every setting from the process
    environment and immediately validates that required secrets are
    present, so a misconfigured deployment fails fast at startup.

    Raises:
        ValueError: from the constructor when any required variable is unset.
    """

    def __init__(self):
        # Database Configuration (Supabase)
        self.supabase_url = os.getenv("SUPABASE_URL")
        self.supabase_service_role_key = os.getenv("SUPABASE_SERVICE_ROLE_KEY")

        # AI Service Configuration
        self.openai_api_key = os.getenv("OPENAI_API_KEY")
        self.openai_base_url = os.getenv("OPENAI_BASE_URL", "https://api.openai.com/v1")

        # LiteLLM Configuration (alternative to OpenAI)
        self.litellm_base_url = os.getenv("LITELLM_BASE_URL")
        self.litellm_api_key = os.getenv("LITELLM_API_KEY")
        self.litellm_model = os.getenv("LITELLM_MODEL", "azure-gpt-4.1")

        # Payment Service Configuration (Stripe)
        self.stripe_secret_key = os.getenv("STRIPE_SECRET_KEY")
        self.stripe_publishable_key = os.getenv("STRIPE_PUBLISHABLE_KEY")
        self.stripe_webhook_secret = os.getenv("STRIPE_WEBHOOK_SECRET")
        self.stripe_default_currency = "twd"  # Taiwan Dollar

        # Application Configuration
        self.frontend_url = os.getenv("FRONTEND_URL", "http://localhost:5173")
        self.api_version = os.getenv("API_VERSION", "1.0.0")
        self.environment = os.getenv("ENVIRONMENT", "development")

        # Server Configuration
        self.host = os.getenv("HOST", "0.0.0.0")
        self.port = int(os.getenv("PORT", "8000"))

        # AI Model Configuration (for LiteLLM)
        self.ai_model_name = os.getenv("AI_MODEL_NAME", "azure-gpt-4.1")
        self.ai_max_tokens = 2000
        self.ai_temperature = 0.7

        # Currency Configuration — order amounts are in cents; the donation
        # minimum is in whole NT$ (mixed units are historical; kept as-is).
        self.default_currency = "twd"
        self.currency_symbol = "NT$"
        self.min_order_amount = 1000  # cents (NT$10)
        self.max_order_amount = 1000000  # cents (NT$10,000)
        self.min_donation_amount = 50  # NT$50 minimum for Stripe
        # Backwards-compatible uppercase constant
        self.MIN_DONATION_AMOUNT = self.min_donation_amount

        # Security Configuration
        # NOTE(review): the fallback JWT secret is unsafe for production;
        # consider making JWT_SECRET_KEY required when ENVIRONMENT=production.
        self.jwt_secret_key = os.getenv("JWT_SECRET_KEY", "your-secret-key-here")
        self.access_token_expire_minutes = 30

        # CORS Configuration
        self.cors_origins = self._parse_cors_origins()

        # Fail fast if any required secret is missing.
        self._validate_required_settings()

    def _parse_cors_origins(self) -> List[str]:
        """Parse the comma-separated CORS_ORIGINS env var into a list."""
        origins_str = os.getenv("CORS_ORIGINS", "http://localhost:3000,http://localhost:5173")
        return [origin.strip() for origin in origins_str.split(",")]

    def _validate_required_settings(self):
        """Raise ValueError naming every required env var that is unset."""
        required_vars = [
            "SUPABASE_URL",
            "SUPABASE_SERVICE_ROLE_KEY",
            "OPENAI_API_KEY",
            "STRIPE_SECRET_KEY",
            "STRIPE_WEBHOOK_SECRET"
        ]

        missing_vars = [var for var in required_vars if not os.getenv(var)]

        if missing_vars:
            raise ValueError(f"Missing required environment variables: {', '.join(missing_vars)}")

    def get_current_timestamp(self) -> str:
        """Return the current UTC time as ISO-8601 with a trailing 'Z'.

        Uses a timezone-aware "now" (datetime.utcnow() is deprecated since
        Python 3.12) but strips tzinfo so the output keeps the historical
        '<iso>Z' shape exactly.
        """
        from datetime import timezone  # local import: top level only imports datetime
        return datetime.now(timezone.utc).replace(tzinfo=None).isoformat() + "Z"

    def is_development(self) -> bool:
        """True when ENVIRONMENT is 'development' (case-insensitive)."""
        return self.environment.lower() == "development"

    def is_production(self) -> bool:
        """True when ENVIRONMENT is 'production' (case-insensitive)."""
        return self.environment.lower() == "production"

    def format_currency(self, amount_cents: int) -> str:
        """Format an amount given in cents, e.g. 12345 -> 'NT$123.45'."""
        return f"{self.currency_symbol}{amount_cents / 100:.2f}"
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
# Global settings instance; importing this module reads the environment and
# raises ValueError immediately if required variables are missing.
settings = Settings()
|
crud.py
ADDED
|
@@ -0,0 +1,453 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
CRUD operations for Silver Table Assistant backend.
|
| 3 |
+
Provides async database operations for profiles, orders, and donations.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from typing import List, Optional, Dict, Any
|
| 7 |
+
from datetime import datetime, timedelta
|
| 8 |
+
from uuid import UUID, uuid4
|
| 9 |
+
|
| 10 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 11 |
+
from sqlalchemy import select, func
|
| 12 |
+
from sqlmodel import col
|
| 13 |
+
|
| 14 |
+
from models import Profile, Order, Donation, MenuItem
|
| 15 |
+
from schemas import (
|
| 16 |
+
ProfileCreate, ProfileUpdate, ProfileRead,
|
| 17 |
+
OrderCreate, OrderUpdate, OrderRead,
|
| 18 |
+
DonationCreate, DonationUpdate, DonationRead
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# ========== Profile CRUD Operations ==========
|
| 23 |
+
|
| 24 |
+
async def get_profile(db: AsyncSession, profile_id: UUID) -> Optional[Profile]:
    """
    Fetch a single profile by primary key.

    Args:
        db: Database session
        profile_id: Profile ID

    Returns:
        The matching Profile, or None when no such row exists.
    """
    return await db.get(Profile, profile_id)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
async def get_profiles_by_user(db: AsyncSession, user_id: UUID) -> List[Profile]:
    """
    Fetch every profile owned by one Supabase Auth user.

    Args:
        db: Database session
        user_id: Supabase Auth user ID

    Returns:
        List of Profile objects (possibly empty).
    """
    rows = await db.execute(select(Profile).where(Profile.user_id == user_id))
    return rows.scalars().all()
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
async def create_profile(db: AsyncSession, profile_data: ProfileCreate, user_id: UUID) -> Profile:
    """
    Persist a new profile for the given auth user.

    Args:
        db: Database session
        profile_data: Validated profile payload
        user_id: Supabase Auth user ID

    Returns:
        The freshly committed Profile row.
    """
    payload = profile_data.dict()
    payload["user_id"] = user_id
    # Primary key is generated client-side rather than by the DB.
    payload["id"] = uuid4()

    new_profile = Profile(**payload)
    db.add(new_profile)
    await db.commit()
    await db.refresh(new_profile)
    return new_profile
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
async def update_profile(db: AsyncSession, profile_id: UUID, profile_data: ProfileUpdate) -> Optional[Profile]:
    """
    Partially update an existing profile.

    Only fields the caller explicitly set on ``profile_data`` are applied;
    ``updated_at`` is stamped with naive UTC to match the column's existing
    convention.

    Args:
        db: Database session
        profile_id: Profile ID
        profile_data: Profile update data (unset fields are ignored)

    Returns:
        Updated Profile object, or None when the profile does not exist.
    """
    profile = await db.get(Profile, profile_id)
    if not profile:
        return None

    # Apply only fields the caller provided (PATCH semantics).
    update_data = profile_data.dict(exclude_unset=True)
    for field, value in update_data.items():
        setattr(profile, field, value)

    # datetime.utcnow() is deprecated since Python 3.12; produce the same
    # naive-UTC value from an aware "now" instead.
    from datetime import timezone  # local import: module top imports only datetime/timedelta
    profile.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)

    await db.commit()
    await db.refresh(profile)

    return profile
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
async def delete_profile(db: AsyncSession, profile_id: UUID) -> bool:
    """
    Delete a profile by primary key.

    Args:
        db: Database session
        profile_id: Profile ID

    Returns:
        True when a row was deleted, False when none existed.
    """
    target = await db.get(Profile, profile_id)
    if not target:
        return False

    await db.delete(target)
    await db.commit()
    return True
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
# ========== Order CRUD Operations ==========
|
| 130 |
+
|
| 131 |
+
async def create_order(db: AsyncSession, order_data: OrderCreate, profile_id: UUID) -> Order:
    """
    Persist a new order owned by the given profile.

    Args:
        db: Database session
        order_data: Validated order payload
        profile_id: Profile ID placing the order

    Returns:
        The freshly committed Order row.
    """
    payload = order_data.dict()
    payload["profile_id"] = profile_id

    # Normalise line items to plain dicts so they serialise into the JSON
    # column; accepts both Pydantic models and pre-converted dicts.
    if "items" in payload:
        payload["items"] = [
            entry.dict() if hasattr(entry, 'dict') else entry
            for entry in payload["items"]
        ]

    new_order = Order(**payload)
    db.add(new_order)
    await db.commit()
    await db.refresh(new_order)
    return new_order
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
async def update_order_status(
    db: AsyncSession,
    order_id: UUID,
    status: str,
    stripe_session_id: Optional[str] = None
) -> Optional[Order]:
    """
    Set an order's status and, when given, its Stripe session ID.

    Args:
        db: Database session
        order_id: Order ID
        status: New order status
        stripe_session_id: Optional Stripe session ID to attach

    Returns:
        Updated Order object, or None when the order does not exist.
    """
    target = await db.get(Order, order_id)
    if not target:
        return None

    target.status = status
    if stripe_session_id:
        target.stripe_session_id = stripe_session_id

    await db.commit()
    await db.refresh(target)
    return target
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
async def get_order_by_stripe_session(db: AsyncSession, stripe_session_id: str) -> Optional[Order]:
    """
    Look up the order attached to a Stripe checkout session.

    Args:
        db: Database session
        stripe_session_id: Stripe session ID

    Returns:
        The matching Order, or None when no row exists.
    """
    rows = await db.execute(
        select(Order).where(Order.stripe_session_id == stripe_session_id)
    )
    return rows.scalar_one_or_none()
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
async def get_orders_by_profile(db: AsyncSession, profile_id: UUID, limit: int = 10) -> List[Order]:
    """
    Fetch a profile's most recent orders, newest first.

    Args:
        db: Database session
        profile_id: Profile ID
        limit: Maximum number of orders to return

    Returns:
        List of Order objects, ordered by created_at descending.
    """
    stmt = (
        select(Order)
        .where(Order.profile_id == profile_id)
        .order_by(col(Order.created_at).desc())
        .limit(limit)
    )
    rows = await db.execute(stmt)
    return rows.scalars().all()
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
# ========== Donation CRUD Operations ==========
|
| 233 |
+
|
| 234 |
+
async def create_donation(db: AsyncSession, donation_data: DonationCreate) -> Donation:
    """
    Persist a new donation record.

    Args:
        db: Database session
        donation_data: Validated donation payload

    Returns:
        The freshly committed Donation row.
    """
    new_donation = Donation(**donation_data.dict())
    db.add(new_donation)
    await db.commit()
    await db.refresh(new_donation)
    return new_donation
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
async def update_donation_status(
    db: AsyncSession,
    donation_id: UUID,
    status: str,
    stripe_session_id: Optional[str] = None
) -> Optional[Donation]:
    """
    Set a donation's status and, when given, its Stripe session ID.

    Args:
        db: Database session
        donation_id: Donation ID
        status: New donation status
        stripe_session_id: Optional Stripe session ID to attach

    Returns:
        Updated Donation object, or None when the donation does not exist.
    """
    target = await db.get(Donation, donation_id)
    if not target:
        return None

    target.status = status
    if stripe_session_id:
        target.stripe_session_id = stripe_session_id

    await db.commit()
    await db.refresh(target)
    return target
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
async def get_donation_by_stripe_session(db: AsyncSession, stripe_session_id: str) -> Optional[Donation]:
    """
    Look up the donation attached to a Stripe checkout session.

    Args:
        db: Database session
        stripe_session_id: Stripe session ID

    Returns:
        The matching Donation, or None when no row exists.
    """
    rows = await db.execute(
        select(Donation).where(Donation.stripe_session_id == stripe_session_id)
    )
    return rows.scalar_one_or_none()
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
# ========== Dashboard Statistics ==========
|
| 303 |
+
|
| 304 |
+
async def get_dashboard_stats(db: AsyncSession, profile_id: UUID) -> Dict[str, Any]:
    """
    Get dashboard stats for a specific profile in the format expected by the frontend.

    Returns a dict with:
      - ``weekly_heatmap``: one entry per day for the last 7 days (oldest
        first) with that day's order count for the profile.
      - ``nutrition_stats``: hard-coded placeholder percentages (not derived
        from real order data yet).

    NOTE(review): the heatmap issues one COUNT query per day (7 round
    trips); a single GROUP BY date query would do the same work in one.
    """
    # 1. Weekly Heatmap (Last 7 days)
    weekly_heatmap = []
    today = datetime.utcnow().date()
    # i counts down 6..0 so the list comes out oldest-day-first.
    for i in range(6, -1, -1):
        target_date = today - timedelta(days=i)
        # Full-day window [00:00:00, 23:59:59.999999] for the target date.
        start_time = datetime.combine(target_date, datetime.min.time())
        end_time = datetime.combine(target_date, datetime.max.time())

        query = (
            select(func.count(Order.id))
            .where(
                Order.profile_id == profile_id,
                # Order.status == "completed", # For demo, count all or pending
                Order.created_at >= start_time,
                Order.created_at <= end_time
            )
        )
        result = await db.execute(query)
        count = result.scalar() or 0
        weekly_heatmap.append({
            "date": target_date.isoformat(),
            "count": count
        })

    # 2. Nutrition Stats (Dummy/Estimated for now)
    # Target values based on common silver nutrition guidelines
    nutrition_stats = [
        {"category": "蛋白質", "percentage": 85},
        {"category": "維生素", "percentage": 72},
        {"category": "礦物質", "percentage": 68},
        {"category": "纖維質", "percentage": 90},
        {"category": "水分", "percentage": 78},
        {"category": "熱量", "percentage": 82},
    ]

    return {
        "weekly_heatmap": weekly_heatmap,
        "nutrition_stats": nutrition_stats
    }
|
| 347 |
+
|
| 348 |
+
|
| 349 |
+
# ========== Menu Item Operations ==========
|
| 350 |
+
|
| 351 |
+
async def get_menu_items(db: AsyncSession, limit: Optional[int] = None, category: Optional[str] = None) -> List[MenuItem]:
    """
    Fetch available menu items, newest first, with optional filtering.

    Args:
        db: Database session
        limit: Maximum number of items to return (None = no cap)
        category: Filter by category when given

    Returns:
        List of available MenuItem objects.
    """
    # "== True" is a SQLAlchemy column expression, not a Python comparison.
    stmt = select(MenuItem).where(MenuItem.available == True)  # noqa: E712

    if category:
        stmt = stmt.where(MenuItem.category == category)

    stmt = stmt.order_by(col(MenuItem.created_at).desc())

    if limit:
        stmt = stmt.limit(limit)

    rows = await db.execute(stmt)
    return rows.scalars().all()
|
| 375 |
+
|
| 376 |
+
|
| 377 |
+
async def get_menu_item(db: AsyncSession, item_id: int) -> Optional[MenuItem]:
    """
    Look up a single menu item by primary key.

    Args:
        db: Database session.
        item_id: Menu item ID.

    Returns:
        The MenuItem, or None when no row matches.
    """
    stmt = select(MenuItem).where(MenuItem.id == item_id)
    rows = await db.execute(stmt)
    return rows.scalar_one_or_none()
|
| 390 |
+
|
| 391 |
+
|
| 392 |
+
# ========== Analytics and Reporting ==========
|
| 393 |
+
|
| 394 |
+
async def get_user_order_history(db: AsyncSession, profile_id: UUID, days: int = 30) -> List[Order]:
    """
    Fetch a profile's orders from the last `days` days, newest first.

    Args:
        db: Database session.
        profile_id: Profile ID whose orders to fetch.
        days: Look-back window in days.

    Returns:
        List of Order objects ordered by created_at descending.
    """
    # NOTE(review): created_at comparisons assume naive-UTC timestamps
    # (datetime.utcnow is used throughout this module) — confirm against schema.
    cutoff = datetime.utcnow() - timedelta(days=days)

    stmt = (
        select(Order)
        .where(Order.profile_id == profile_id)
        .where(Order.created_at >= cutoff)
        .order_by(col(Order.created_at).desc())
    )

    rows = await db.execute(stmt)
    return rows.scalars().all()
|
| 419 |
+
|
| 420 |
+
|
| 421 |
+
async def get_popular_menu_items(db: AsyncSession, limit: int = 10) -> List[Dict[str, Any]]:
    """
    Return "popular" menu items with placeholder sales figures.

    NOTE: True popularity would require aggregating order.items JSON data;
    until then this simply returns the newest available items.

    Args:
        db: Database session.
        limit: Maximum number of items to return.

    Returns:
        Dicts containing the menu item plus placeholder order_count /
        total_revenue values (both 0).
    """
    stmt = (
        select(MenuItem)
        .where(MenuItem.available == True)  # noqa: E712 — SQLAlchemy needs the comparison
        .order_by(col(MenuItem.created_at).desc())
        .limit(limit)
    )

    rows = await db.execute(stmt)

    entries: List[Dict[str, Any]] = []
    for item in rows.scalars().all():
        entries.append({
            "menu_item": item,
            "order_count": 0,    # placeholder until order.items aggregation exists
            "total_revenue": 0,  # placeholder until order.items aggregation exists
        })
    return entries
|
data/慢性病飲食原則.md
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
糖尿病飲食原則
|
| 2 |
+
|
| 3 |
+
1.定時定量,忌暴飲暴食。
|
| 4 |
+
2.維持理想體重,忌肥胖。
|
| 5 |
+
3.均衡攝取各類食物,並依個人需求控制飲食份量。
|
| 6 |
+
4.儘量選擇高纖維食物,三餐以全穀類或雜糧飯代替白米飯,再搭配蔬菜類(如:葉菜類、菇類、竹筍、絲瓜、胡瓜、莢豆類、海帶等)。
|
| 7 |
+
5.烹調遵守少油、少鹽、少糖的原則,烹調採用清蒸、水煮、涼拌、燒、烤、 燉、滷等方式較佳。
|
| 8 |
+
6.減少油炸、油煎、油酥以及豬皮、雞皮、鴨皮、魚皮等食物之攝取。
|
| 9 |
+
7.炒菜多選用單元不飽和脂肪酸含量高油脂如橄欖油、菜籽油等。
|
| 10 |
+
8.若血液膽固醇濃度過高,需減少飽和脂肪酸攝取(如:肥肉、動物油、動物皮、全脂奶、椰子油),並依個人需求控制飲食中肉品的份量
|
| 11 |
+
9.避免攝取精緻的甜食、含有蔗糖或果糖的飲料、各式糖果或餅乾、水果罐頭等加糖製品,嗜甜食者可使用代糖。
|
| 12 |
+
10.儘量避免喝酒,若應酬需飲酒時,要限量飲用,並應避免空腹喝酒。
|
| 13 |
+
11.注射胰島素或口服降血糖藥物後,若延緩用餐時,需事先進食少許點心,如:一份吐司(25g)或一杯鮮奶(240ml),並隨身攜帶糖果以防低血糖發生。
|
| 14 |
+
12.一旦有低血糖症狀,如:飢餓、發抖、冒冷汗、心跳加快、無力、頭暈、嘴唇麻等,應立即進食含10~15公克容易吸收之糖份的食品,例如:含糖飲料(120~150ml)或3-4顆方糖或一匙蜂蜜。
|
| 15 |
+
13.從事額外的運動之前,須先吃些點心。若從事劇烈的運動,每半小時吃含15公克醣類的食物,例如:一片吐司(25g)或一份水果。
|
| 16 |
+
14.與營養師共同協商,訂定飲食計畫。
|
| 17 |
+
高血壓飲食原則
|
| 18 |
+
1.均衡飲食。
|
| 19 |
+
2.控制體重:體重過重者利用飲食、運動使體重下降有利體重控制。
|
| 20 |
+
3.低油飲食:
|
| 21 |
+
|
| 22 |
+
避免肥肉、豬(雞)皮及油酥糕點攝取
|
| 23 |
+
多用蒸、煮、烤、滷、涼拌等烹調方式以減少油脂攝入。
|
| 24 |
+
4.減少鈉攝取:
|
| 25 |
+
少用鹽、醬油、味精、烏醋等含鈉調味料
|
| 26 |
+
加工食品含鈉量高,例如:麵線、油麵、蘇打餅乾、乳酪、加工肉類(香腸、火腿)、雞精、蜜餞、運動飲料、碳酸飲料、花生醬、罐頭食品等,應儘量少用。
|
| 27 |
+
多選用新鮮食材,自行烹調。
|
| 28 |
+
5.飲食中的鉀離子有對抗高血壓的保護作用,新鮮蔬果多為高鉀食物,增加攝取可幫助血壓控制。
|
| 29 |
+
6.飲食中的鈣離子及鎂離子對於高血壓也有幫助,含鈣離子較多的食物有低脂奶類及製品、小魚干、豆乾、豆腐、深綠色蔬菜、海菜類,含鎂離子較多的食物有海藻、髮菜、海鮮、堅果類、全穀類、綠色蔬菜。
|
| 30 |
+
7.多攝取纖維可使排便順暢,避免因用力解便而使血壓上升。
|
| 31 |
+
8.烹調用油少用動物性油脂(豬油、牛油等),以植物油烹調。
|
| 32 |
+
9.適當調整生活型態,如:戒菸酒、少用刺激性食物、規律運動以及壓力調適。
|
| 33 |
+
高脂血症飲食
|
| 34 |
+
高膽固醇血症飲食原則:
|
| 35 |
+
|
| 36 |
+
1.維持理想體重,避免肥胖。
|
| 37 |
+
2.控制油脂攝取量,減少油炸、油煎,盡量採用清蒸、水煮、涼拌、燒、烤、燉、滷等方式較佳。
|
| 38 |
+
3.少吃含有飽和脂肪酸的食物:
|
| 39 |
+
少吃豬油、奶油、牛油、椰子油
|
| 40 |
+
不要吃雞皮、豬皮、鴨皮、焢肉、三層肉、梅花肉
|
| 41 |
+
多選白肉少吃紅肉
|
| 42 |
+
4.炒菜多選用單元不飽和脂肪酸含量高的油脂(如:橄欖油、芥花油等),並避免高溫烹調。
|
| 43 |
+
5.少吃含有反式脂肪酸的食物:
|
| 44 |
+
少吃酥油、白油、乳瑪琳等。
|
| 45 |
+
各式甜鹹麵包、油酥糕餅、蛋糕。
|
| 46 |
+
各式甜鹹餅乾。
|
| 47 |
+
6.多選用富含高纖維的食物,如:全穀雜糧類、未加工的豆類、蔬菜、水果等。
|
| 48 |
+
7.適當調整生活型態,如戒菸酒、規律運動以及壓力調適。
|
| 49 |
+
|
| 50 |
+
高三酸甘油酯飲食原則:
|
| 51 |
+
1.控制體重可明顯降低血液中三酸甘油酯濃度。
|
| 52 |
+
2.多採用複合式醣類食物,如:全穀根莖類;並避免攝取精緻的甜食、含有蔗糖或果糖的飲料、各式糖果或餅乾、水果罐頭等加糖製品。
|
| 53 |
+
3.可多攝取富含ω-3脂肪酸的魚類,如:秋刀魚、鮭魚、鯖魚、鰻魚、白鯧魚、牡蠣等。
|
| 54 |
+
4.不宜飲酒。
|
| 55 |
+
5.控制油脂攝取量,減少油炸、油煎、油酥以及豬皮、雞皮、鴨皮、魚皮等食物之攝取,盡量採用清蒸、水煮、涼拌、燒、烤、燉、滷等方式較佳。
|
| 56 |
+
6.炒菜多選用單元不飽和脂肪酸含量高的油脂如橄欖油、芥花油等,並避免高溫烹調。
|
| 57 |
+
資料維護人:羅東聖母醫院
|
database.py
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Database configuration and session management for Silver Table Assistant backend.
|
| 3 |
+
Uses SQLModel with AsyncEngine for Supabase PostgreSQL with pgvector support.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
from typing import AsyncGenerator
|
| 8 |
+
|
| 9 |
+
from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, create_async_engine
|
| 10 |
+
from sqlalchemy.orm import sessionmaker
|
| 11 |
+
from sqlmodel import SQLModel
|
| 12 |
+
|
| 13 |
+
# Environment variables
|
| 14 |
+
SUPABASE_URL = os.getenv("SUPABASE_URL")
|
| 15 |
+
SUPABASE_SERVICE_ROLE_KEY = os.getenv("SUPABASE_SERVICE_ROLE_KEY")
|
| 16 |
+
DATABASE_URL = os.getenv("DATABASE_URL")
|
| 17 |
+
|
| 18 |
+
# Validate required environment variables
|
| 19 |
+
if not SUPABASE_URL:
|
| 20 |
+
raise ValueError("SUPABASE_URL environment variable is required")
|
| 21 |
+
if not SUPABASE_SERVICE_ROLE_KEY:
|
| 22 |
+
raise ValueError("SUPABASE_SERVICE_ROLE_KEY environment variable is required")
|
| 23 |
+
|
| 24 |
+
# Construct database URL for async Supabase connection
|
| 25 |
+
# Using service role key for server-side operations
|
| 26 |
+
if DATABASE_URL:
|
| 27 |
+
# Use provided DATABASE_URL if available
|
| 28 |
+
ASYNC_DATABASE_URL = DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://")
|
| 29 |
+
else:
|
| 30 |
+
# Construct from Supabase URL
|
| 31 |
+
# Format: postgresql+asyncpg://postgres:[password]@db.[project-ref].supabase.co:5432/postgres
|
| 32 |
+
base_url = SUPABASE_URL.replace("https://", "").replace("http://", "")
|
| 33 |
+
project_ref = base_url.split(".")[0]
|
| 34 |
+
ASYNC_DATABASE_URL = f"postgresql+asyncpg://postgres:{SUPABASE_SERVICE_ROLE_KEY}@db.{project_ref}.supabase.co:5432/postgres"
|
| 35 |
+
|
| 36 |
+
from sqlalchemy.pool import NullPool
|
| 37 |
+
|
| 38 |
+
# Create async engine with pgvector support
|
| 39 |
+
# Using NullPool because we are connecting through PGBouncer in transaction mode (port 6543)
|
| 40 |
+
engine: AsyncEngine = create_async_engine(
|
| 41 |
+
ASYNC_DATABASE_URL,
|
| 42 |
+
echo=False, # Set to True for SQL query logging
|
| 43 |
+
poolclass=NullPool,
|
| 44 |
+
# Additional settings for Supabase/PGBouncer
|
| 45 |
+
connect_args={
|
| 46 |
+
"statement_cache_size": 0, # Disable prepared statement cache for PGBouncer
|
| 47 |
+
"server_settings": {
|
| 48 |
+
"jit": "off", # Disable JIT for pgvector compatibility
|
| 49 |
+
}
|
| 50 |
+
},
|
| 51 |
+
)
|
| 52 |
+
|
| 53 |
+
# Create session factory
|
| 54 |
+
SessionLocal = sessionmaker(
|
| 55 |
+
bind=engine,
|
| 56 |
+
class_=AsyncSession,
|
| 57 |
+
expire_on_commit=False,
|
| 58 |
+
autocommit=False,
|
| 59 |
+
autoflush=False,
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
async def create_db_and_tables() -> None:
    """Create database tables if they don't exist."""
    # run_sync bridges SQLModel's synchronous metadata API into the async engine.
    async with engine.begin() as conn:
        await conn.run_sync(SQLModel.metadata.create_all)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """
    Yield a database session; use with FastAPI's Depends().

    The `async with` context manager already closes the session when the
    request finishes (even if the endpoint raises), so the previous explicit
    try/finally close() was redundant and has been removed.
    """
    async with SessionLocal() as session:
        yield session
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def get_db_session() -> AsyncSession:
    """
    Get a database session for use in functions.
    Remember to close the session after use.
    """
    # Caller owns the lifecycle; prefer get_session() / `async with` where possible.
    return SessionLocal()
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
async def close_db_connections() -> None:
    """Close all database connections."""
    # Disposes the engine's connection resources; call on application shutdown.
    await engine.dispose()
|
dependencies.py
ADDED
|
@@ -0,0 +1,335 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Authentication and authorization dependencies for Silver Table Assistant.
|
| 3 |
+
Provides JWT verification and user role management using Supabase Auth.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import jwt
|
| 8 |
+
from typing import Optional, Dict, Any, Callable
|
| 9 |
+
from fastapi import Depends, HTTPException, status, Request
|
| 10 |
+
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
|
| 11 |
+
from supabase import create_client, Client
|
| 12 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 13 |
+
|
| 14 |
+
from database import get_session
|
| 15 |
+
from models import Profile
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
# Initialize Supabase client
|
| 19 |
+
SUPABASE_URL = os.getenv("SUPABASE_URL")
|
| 20 |
+
SUPABASE_SERVICE_ROLE_KEY = os.getenv("SUPABASE_SERVICE_ROLE_KEY")
|
| 21 |
+
|
| 22 |
+
if not SUPABASE_URL or not SUPABASE_SERVICE_ROLE_KEY:
|
| 23 |
+
raise ValueError("SUPABASE_URL and SUPABASE_SERVICE_ROLE_KEY environment variables are required")
|
| 24 |
+
|
| 25 |
+
supabase: Client = create_client(SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY)
|
| 26 |
+
|
| 27 |
+
# Security scheme
|
| 28 |
+
security = HTTPBearer()
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class AuthenticationError(HTTPException):
    """Raised when a request's credentials cannot be validated (HTTP 401)."""

    def __init__(self, detail: str = "Could not validate credentials"):
        # WWW-Authenticate header tells clients to retry with a Bearer token.
        auth_headers = {"WWW-Authenticate": "Bearer"}
        super().__init__(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=detail,
            headers=auth_headers,
        )
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class AuthorizationError(HTTPException):
    """Raised when an authenticated user lacks the required role (HTTP 403)."""

    def __init__(self, detail: str = "Not enough permissions"):
        super().__init__(status_code=status.HTTP_403_FORBIDDEN, detail=detail)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class User:
    """Authenticated user resolved from a Supabase JWT."""

    def __init__(
        self,
        user_id: str,
        email: Optional[str] = None,
        role: Optional[str] = None,
        metadata: Optional[Dict[str, Any]] = None,
        raw_user: Optional[Dict[str, Any]] = None
    ):
        # Supabase auth user id (UUID string).
        self.user_id = user_id
        self.email = email
        # Fall back to the least-privileged default role.
        self.role = role or "family"
        # Supabase user_metadata; raw_user keeps the full auth payload around.
        self.metadata = metadata or {}
        self.raw_user = raw_user or {}
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
async def verify_jwt_token(token: str) -> Dict[str, Any]:
    """
    Verify a JWT token using Supabase Auth.

    Args:
        token: JWT access token to verify.

    Returns:
        Dict with user_id, email, role, metadata and the raw user object.

    Raises:
        AuthenticationError: If the token is missing, expired or invalid.
    """
    try:
        # Delegate signature/expiry validation to Supabase.
        response = supabase.auth.get_user(token)

        if response.user is None:
            raise AuthenticationError("Invalid token")

        return {
            "user_id": response.user.id,
            "email": response.user.email,
            # NOTE(review): assumes user_metadata is always a dict — confirm
            # against the Supabase client's return type.
            "role": response.user.user_metadata.get("role", "user"),
            "metadata": response.user.user_metadata,
            "raw_user": response.user.__dict__
        }
    except AuthenticationError:
        # Bug fix: the broad handler below previously caught our own
        # AuthenticationError and re-wrapped it as "Token verification
        # failed: ...", obscuring the real cause. Re-raise it unchanged.
        raise
    except Exception as e:
        raise AuthenticationError(f"Token verification failed: {str(e)}") from e
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
async def get_current_user(
    credentials: HTTPAuthorizationCredentials = Depends(security)
) -> User:
    """
    Resolve the authenticated user from the request's Bearer token.

    Args:
        credentials: HTTP authorization credentials extracted by the security scheme.

    Returns:
        User object carrying id, email, role and Supabase metadata.

    Raises:
        AuthenticationError: If no credentials are supplied or verification fails.
    """
    if not credentials:
        raise AuthenticationError("No credentials provided")

    try:
        # The scheme prefix is already stripped by HTTPBearer.
        info = await verify_jwt_token(credentials.credentials)
        return User(
            user_id=info["user_id"],
            email=info["email"],
            role=info["role"],
            metadata=info["metadata"],
            raw_user=info["raw_user"],
        )
    except AuthenticationError:
        # Already the right exception type; propagate untouched.
        raise
    except Exception as e:
        raise AuthenticationError(f"Authentication failed: {str(e)}")
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
async def get_optional_user(
    request: Request
) -> Optional[User]:
    """
    Return the authenticated user when a valid Bearer token is present, else None.

    Args:
        request: FastAPI request object.

    Returns:
        User object or None (anonymous access is allowed).
    """
    auth_header = request.headers.get("Authorization")
    if not auth_header or not auth_header.startswith("Bearer "):
        return None

    try:
        creds = HTTPAuthorizationCredentials(
            scheme="Bearer",
            credentials=auth_header.split(" ", 1)[1],
        )
        return await get_current_user(creds)
    except Exception:
        # Any auth failure (including AuthenticationError) degrades to anonymous.
        return None
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def require_role(required_role: str) -> Callable:
    """
    Build a dependency that rejects users lacking `required_role`.

    Admins always pass regardless of the required role.

    Args:
        required_role: Role name the endpoint demands.

    Returns:
        An async dependency function usable with Depends().
    """
    async def role_checker(user: User = Depends(get_current_user)) -> User:
        if user.role == required_role or user.role == "admin":
            return user
        raise AuthorizationError(
            f"Required role: {required_role}, your role: {user.role}"
        )
    return role_checker
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
def require_roles(allowed_roles: list[str]) -> Callable:
    """
    Build a dependency that accepts any of `allowed_roles`.

    Admins always pass regardless of the allowed list.

    Args:
        allowed_roles: Role names the endpoint accepts.

    Returns:
        An async dependency function usable with Depends().
    """
    async def roles_checker(user: User = Depends(get_current_user)) -> User:
        if user.role == "admin" or user.role in allowed_roles:
            return user
        raise AuthorizationError(
            f"Required roles: {allowed_roles}, your role: {user.role}"
        )
    return roles_checker
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
async def get_user_profile(
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_session)
) -> Optional[Profile]:
    """
    Fetch the user's first profile row, or None when they have none (legacy/helper).

    Raises:
        HTTPException: 500 when the database lookup fails.
    """
    try:
        from sqlmodel import select
        from uuid import UUID

        owner_id = UUID(user.user_id)
        rows = await db.execute(
            select(Profile).where(Profile.user_id == owner_id)
        )
        return rows.scalars().first()
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to fetch user profile: {str(e)}"
        )
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
async def get_or_create_user_profile(
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_session)
) -> Profile:
    """
    Return the user's profile, creating a default one on first access.

    Args:
        user: Authenticated user.
        db: Database session.

    Returns:
        Existing Profile, or a freshly persisted one with default values.
    """
    profile = await get_user_profile(user, db)
    if profile is not None:
        return profile

    # Fix: the previous local `from models import Profile` was redundant —
    # Profile is already imported at module level — and shadowed that import.
    from uuid import UUID, uuid4

    # Default profile values matching the current schema; the user edits later.
    profile = Profile(
        id=uuid4(),
        user_id=UUID(user.user_id),
        name=user.email.split('@')[0] if user.email else "User",
        age=70,
        gender="male",
        height=165.0,
        weight=60.0,
        chronic_diseases=[],
        dietary_restrictions=[],
        chewing_ability="normal"
    )

    db.add(profile)
    await db.commit()
    await db.refresh(profile)

    return profile
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def get_user_metadata(user: User = Depends(get_current_user)) -> Dict[str, Any]:
    """
    Flatten the user's identity plus Supabase metadata for AI prompts.

    Args:
        user: Authenticated user.

    Returns:
        Dict of user_id/email/role merged with user.metadata
        (metadata keys win on collision, as before).
    """
    merged: Dict[str, Any] = {
        "user_id": user.user_id,
        "email": user.email,
        "role": user.role,
    }
    merged.update(user.metadata)
    return merged
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
def get_admin_user(
    admin_only: User = Depends(require_role("admin"))
) -> User:
    """
    Dependency for admin-only endpoints.

    Args:
        admin_only: User whose "admin" role was already enforced by require_role.

    Returns:
        The authenticated admin user, unchanged.
    """
    return admin_only
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
# Utility functions for role-based access control
|
| 298 |
+
|
| 299 |
+
def is_admin(user: User) -> bool:
    """Return True when the user's role is exactly "admin"."""
    return user.role == "admin"
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
def is_staff(user: User) -> bool:
    """Return True for staff members and admins."""
    return user.role in {"staff", "admin"}
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
def can_access_health_data(user: User) -> bool:
    """Return True when the role may read health-related data."""
    return user.role in {"admin", "staff", "user"}
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
def can_manage_orders(user: User) -> bool:
    """Return True when the role may manage orders."""
    return user.role in {"admin", "staff"}
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
def can_view_analytics(user: User) -> bool:
    """Return True only for admins; analytics are admin-scoped."""
    return user.role == "admin"
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
# CORS and security utilities
|
| 325 |
+
|
| 326 |
+
def get_allowed_origins() -> list[str]:
    """Get allowed CORS origins from environment."""
    # Imported lazily to avoid a circular import at module load time.
    from config import settings
    return settings.cors_origins
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def get_api_version() -> str:
    """Get API version from environment or default."""
    # Imported lazily to avoid a circular import at module load time.
    from config import settings
    return settings.api_version
|
exceptions.py
ADDED
|
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Custom exceptions and error handling for Silver Table Assistant.
|
| 3 |
+
Provides structured error handling and logging.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import logging
|
| 7 |
+
from typing import Any, Dict, Optional
|
| 8 |
+
from fastapi import HTTPException, status
|
| 9 |
+
from sqlalchemy.exc import SQLAlchemyError, IntegrityError
|
| 10 |
+
try:
|
| 11 |
+
from stripe import StripeError, CardError, AuthenticationError, InvalidRequestError
|
| 12 |
+
except ImportError:
|
| 13 |
+
# Fallback for different stripe versions or if not installed
|
| 14 |
+
class StripeError(Exception): pass
|
| 15 |
+
class CardError(StripeError): pass
|
| 16 |
+
class AuthenticationError(StripeError): pass
|
| 17 |
+
class InvalidRequestError(StripeError): pass
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# Configure logging
|
| 21 |
+
logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class SilverTableException(Exception):
    """Base exception for Silver Table Assistant."""

    def __init__(self, message: str, details: Optional[Dict[str, Any]] = None):
        # Keep message/details on the instance for structured reporting.
        self.message = message
        self.details = details if details else {}
        super().__init__(message)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the exception for logging or API responses."""
        return {
            "error": type(self).__name__,
            "message": self.message,
            "details": self.details,
        }
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class DatabaseException(SilverTableException):
    """Database-related errors (connection, integrity, query failures)."""
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class PaymentException(SilverTableException):
    """Payment/Stripe-related errors."""
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class AuthenticationException(SilverTableException):
    """Authentication-related errors."""
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class ValidationException(SilverTableException):
    """Data validation errors."""
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class ExternalServiceException(SilverTableException):
    """External service (OpenAI, etc.) errors."""
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def handle_database_error(error: Exception, operation: str) -> DatabaseException:
    """Translate a raw database error into a DatabaseException, with logging."""
    # IntegrityError must be checked before its SQLAlchemyError base class.
    if isinstance(error, IntegrityError):
        message = f"Database integrity error during {operation}"
        kind = "integrity_error"
    elif isinstance(error, SQLAlchemyError):
        message = f"Database operation failed during {operation}"
        kind = "sqlalchemy_error"
    else:
        message = f"Unexpected database error during {operation}"
        kind = "unknown_db_error"

    logger.error(f"{message}: {str(error)}", extra={"error_type": kind})
    return DatabaseException(message, {"operation": operation, "original_error": str(error)})
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def handle_payment_error(error: Exception, operation: str) -> PaymentException:
    """Translate a Stripe/payment failure into a PaymentException, with logging."""
    # User-caused failures log at warning; platform failures log at error.
    if isinstance(error, CardError):
        message = f"Payment card error during {operation}: {error.user_message}"
        log, kind = logger.warning, "card_error"
    elif isinstance(error, AuthenticationError):
        message = f"Payment authentication error during {operation}"
        log, kind = logger.error, "auth_error"
    elif isinstance(error, InvalidRequestError):
        message = f"Invalid payment request during {operation}: {str(error)}"
        log, kind = logger.warning, "invalid_request"
    elif isinstance(error, StripeError):
        message = f"Stripe error during {operation}: {str(error)}"
        log, kind = logger.error, "stripe_error"
    else:
        message = f"Unexpected payment error during {operation}"
        log, kind = logger.error, "unknown_payment_error"

    log(f"{message}: {str(error)}", extra={"error_type": kind})
    return PaymentException(message, {"operation": operation, "original_error": str(error)})
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def handle_external_service_error(error: Exception, service: str, operation: str) -> ExternalServiceException:
    """Wrap a third-party service failure (OpenAI, etc.) in an ExternalServiceException."""
    message = f"{service} service error during {operation}: {str(error)}"
    logger.error(message, extra={"error_type": "external_service_error", "service": service})

    details = {"service": service, "operation": operation, "original_error": str(error)}
    return ExternalServiceException(message, details)
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def handle_authentication_error(error: Exception, operation: str) -> AuthenticationException:
    """Wrap an authentication failure in an AuthenticationException, with logging."""
    message = f"Authentication error during {operation}: {str(error)}"
    # Warning, not error: failed auth is an expected operational event.
    logger.warning(message, extra={"error_type": "auth_error"})

    details = {"operation": operation, "original_error": str(error)}
    return AuthenticationException(message, details)
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def handle_validation_error(error: Exception, field: str, operation: str) -> ValidationException:
    """Wrap a data-validation failure in a ValidationException, with logging."""
    message = f"Validation error in {field} during {operation}: {str(error)}"
    logger.warning(message, extra={"error_type": "validation_error", "field": field})

    details = {"field": field, "operation": operation, "original_error": str(error)}
    return ValidationException(message, details)
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def http_exception_from_custom(exception: SilverTableException, status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR) -> HTTPException:
    """Convert a custom SilverTableException into a FastAPI HTTPException."""
    payload = {
        "error": exception.__class__.__name__,
        "message": exception.message,
        "details": exception.details,
    }
    return HTTPException(status_code=status_code, detail=payload)
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def log_and_handle_error(
    error: Exception,
    context: str,
    operation: str,
    reraise: bool = True
) -> Optional[SilverTableException]:
    """
    Centralized error handling with logging.

    Maps known error families to their typed handlers (database, payment,
    authentication) and wraps anything unrecognized in a generic
    SilverTableException after logging it.

    Args:
        error: The exception that occurred
        context: Context where the error occurred
        operation: Operation being performed
        reraise: Whether to reraise as custom exception

    Returns:
        Custom exception if not reraising, None otherwise (it raises instead)

    Raises:
        SilverTableException (or a subclass): when ``reraise`` is True.
    """
    try:
        if isinstance(error, (SQLAlchemyError, IntegrityError)):
            custom_exception = handle_database_error(error, operation)
        elif isinstance(error, StripeError):
            custom_exception = handle_payment_error(error, operation)
        elif isinstance(error, (AuthenticationError,)):
            custom_exception = handle_authentication_error(error, operation)
        else:
            # Unrecognized error: log with context and wrap generically.
            message = f"Unexpected error during {operation} in {context}: {str(error)}"
            logger.error(message, extra={"error_type": "unexpected_error", "context": context})
            custom_exception = SilverTableException(
                message,
                {"context": context, "operation": operation, "original_error": str(error)}
            )
    except Exception as handling_error:
        # The mapping itself failed; fall back to a generic wrapper so the
        # caller still receives a SilverTableException.
        logger.error(f"Error in error handler: {str(handling_error)}")
        custom_exception = SilverTableException(f"Error handling failed: {str(handling_error)}")

    # BUGFIX: raise/return OUTSIDE the try block. Previously `raise
    # custom_exception` sat inside the try, so our own `except Exception`
    # caught it and re-wrapped every error as a generic "Error handling
    # failed" exception, discarding the typed exception and its details.
    if reraise:
        raise custom_exception
    return custom_exception
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
# Context manager for database operations
|
| 199 |
+
class DatabaseTransaction:
    """Async context manager that commits on success and rolls back on error.

    Entering yields the wrapped session. On a clean exit the session is
    committed and closed; if the managed block raised, the session is rolled
    back, the rollback is logged, and the original exception propagates
    (nothing is suppressed).
    """

    def __init__(self, session):
        # The session is created by the caller; this class only drives
        # commit/rollback/close around the managed block.
        self.session = session

    async def __aenter__(self):
        return self.session

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        session = self.session
        if exc_type is None:
            try:
                await session.commit()
            except Exception as commit_error:
                # Failed commit: roll back and surface a typed database
                # exception. Note the session is not closed on this path,
                # matching the original control flow.
                await session.rollback()
                raise handle_database_error(commit_error, "transaction_commit")
        else:
            await session.rollback()
            logger.error(f"Database transaction rolled back due to: {exc_val}")

        await session.close()
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
# Decorator for automatic error handling
|
| 225 |
+
def handle_errors(operation: str, reraise: bool = True):
    """Decorator for automatic error handling in async functions.

    Wraps a coroutine function so that any exception it raises is routed
    through log_and_handle_error; with reraise=True (the default) the mapped
    SilverTableException propagates, otherwise the mapped exception object is
    returned to the caller in place of a result.

    Args:
        operation: Operation name recorded in logs and exception details.
        reraise: Whether failures are re-raised (True) or returned (False).

    NOTE(review): only supports ``async def`` targets — decorating a sync
    function would return an un-awaited coroutine wrapper; confirm all call
    sites are async.
    """
    from functools import wraps  # local import keeps the block self-contained

    def decorator(func):
        # BUGFIX: functools.wraps preserves __name__/__doc__/__module__ on the
        # wrapper; without it every decorated function introspects as "wrapper".
        @wraps(func)
        async def wrapper(*args, **kwargs):
            try:
                return await func(*args, **kwargs)
            except Exception as e:
                return log_and_handle_error(e, func.__name__, operation, reraise)

        return wrapper

    return decorator
|
menu_data.py
ADDED
|
@@ -0,0 +1,413 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Taiwanese elderly-friendly meal data for Silver Table Assistant.
|
| 3 |
+
Provides 15-20 nutritious, easy-to-chew meals suitable for seniors.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from typing import List, Dict, Any
|
| 7 |
+
|
| 8 |
+
# Taiwanese elderly-friendly menu items.
# Each entry is a plain dict with the shape the helper functions below rely
# on: id (int), name/description (zh-TW strings), suitable_for (list of
# health/dietary tags matched by filter_menu_by_dietary_tag), image_url,
# nutrition (per-serving: calories plus protein/fat/carbs/fiber/sugar in
# grams and sodium in mg — assumed units, TODO confirm), price (int in
# cents, e.g. 28000 == 280元), category, and available (bool).
MENU_ITEMS: List[Dict[str, Any]] = [
    {
        "id": 1,
        "name": "清蒸鱸魚",
        "description": "新鮮鱸魚配薑絲蒸煮,魚肉嫩滑易咀嚼,富含優質蛋白質",
        "suitable_for": ["高血壓", "糖尿病", "牙口不好", "清淡飲食"],
        "image_url": "https://placehold.co/400x300?text=清蒸鱸魚",
        "nutrition": {
            "calories": 180,
            "protein": 25.0,
            "fat": 8.0,
            "carbs": 2.0,
            "sodium": 120,
            "fiber": 0.5,
            "sugar": 1.0
        },
        "price": 28000,  # 280元 in cents
        "category": "蛋白質類",
        "available": True
    },
    {
        "id": 2,
        "name": "南瓜燉肉",
        "description": "南瓜與瘦肉慢燉,口感軟糯,富含維生素A和膳食纖維",
        "suitable_for": ["高血壓", "便秘", "營養不良"],
        "image_url": "https://placehold.co/400x300?text=南瓜燉肉",
        "nutrition": {
            "calories": 220,
            "protein": 18.0,
            "fat": 12.0,
            "carbs": 15.0,
            "sodium": 200,
            "fiber": 3.0,
            "sugar": 8.0
        },
        "price": 22000,  # 220元 in cents
        "category": "葷食類",
        "available": True
    },
    {
        "id": 3,
        "name": "白蘿蔔湯",
        "description": "清淡蘿蔔湯,口感軟嫩,幫助消化,利尿消腫",
        "suitable_for": ["高血壓", "糖尿病", "便秘", "清淡飲食"],
        "image_url": "https://placehold.co/400x300?text=白蘿蔔湯",
        "nutrition": {
            "calories": 45,
            "protein": 2.0,
            "fat": 0.5,
            "carbs": 9.0,
            "sodium": 150,
            "fiber": 2.0,
            "sugar": 4.0
        },
        "price": 8000,  # 80元 in cents
        "category": "湯品",
        "available": True
    },
    {
        "id": 4,
        "name": "蒸蛋羹",
        "description": "嫩滑蒸蛋,口感柔軟易吞嚥,富含優質蛋白質",
        "suitable_for": ["牙口不好", "營養不良", "清淡飲食"],
        "image_url": "https://placehold.co/400x300?text=蒸蛋羹",
        "nutrition": {
            "calories": 120,
            "protein": 10.0,
            "fat": 8.0,
            "carbs": 2.0,
            "sodium": 80,
            "fiber": 0.0,
            "sugar": 1.5
        },
        "price": 6000,  # 60元 in cents
        "category": "蛋白質類",
        "available": True
    },
    {
        "id": 5,
        "name": "紅棗銀耳湯",
        "description": "溫潤甜湯,銀耳富含膠質,紅棗補血養顏",
        "suitable_for": ["糖尿病", "便秘", "美容養顏"],
        "image_url": "https://placehold.co/400x300?text=紅棗銀耳湯",
        "nutrition": {
            "calories": 95,
            "protein": 1.5,
            "fat": 0.3,
            "carbs": 22.0,
            "sodium": 20,
            "fiber": 4.0,
            "sugar": 15.0
        },
        "price": 12000,  # 120元 in cents
        "category": "甜品類",
        "available": True
    },
    {
        "id": 6,
        "name": "菠菜豆腐湯",
        "description": "嫩菠菜配軟豆腐,富含鐵質和蛋白質",
        "suitable_for": ["高血壓", "糖尿病", "牙口不好"],
        "image_url": "https://placehold.co/400x300?text=菠菜豆腐湯",
        "nutrition": {
            "calories": 85,
            "protein": 8.0,
            "fat": 4.0,
            "carbs": 6.0,
            "sodium": 180,
            "fiber": 2.5,
            "sugar": 2.0
        },
        "price": 10000,  # 100元 in cents
        "category": "蔬菜類",
        "available": True
    },
    {
        "id": 7,
        "name": "山藥排骨湯",
        "description": "滋補湯品,山藥健脾益胃,排骨補充鈣質",
        "suitable_for": ["高血壓", "糖尿病", "營養不良"],
        "image_url": "https://placehold.co/400x300?text=山藥排骨湯",
        "nutrition": {
            "calories": 165,
            "protein": 12.0,
            "fat": 10.0,
            "carbs": 6.0,
            "sodium": 220,
            "fiber": 1.5,
            "sugar": 2.5
        },
        "price": 18000,  # 180元 in cents
        "category": "湯品",
        "available": True
    },
    {
        "id": 8,
        "name": "小米粥",
        "description": "溫和養胃小米粥,易消化,適合早晚食用",
        "suitable_for": ["糖尿病", "牙口不好", "清淡飲食"],
        "image_url": "https://placehold.co/400x300?text=小米粥",
        "nutrition": {
            "calories": 120,
            "protein": 4.0,
            "fat": 2.0,
            "carbs": 22.0,
            "sodium": 50,
            "fiber": 2.0,
            "sugar": 1.0
        },
        "price": 5000,  # 50元 in cents
        "category": "主食類",
        "available": True
    },
    {
        "id": 9,
        "name": "香菇雞湯",
        "description": "香菇雞肉湯,鮮美營養,增強免疫力",
        "suitable_for": ["高血壓", "營養不良", "清淡飲食"],
        "image_url": "https://placehold.co/400x300?text=香菇雞湯",
        "nutrition": {
            "calories": 150,
            "protein": 20.0,
            "fat": 6.0,
            "carbs": 3.0,
            "sodium": 280,
            "fiber": 1.0,
            "sugar": 1.5
        },
        "price": 16000,  # 160元 in cents
        "category": "湯品",
        "available": True
    },
    {
        "id": 10,
        "name": "冬瓜排骨湯",
        "description": "清淡冬瓜湯,利尿消腫,適合夏季食用",
        "suitable_for": ["高血壓", "糖尿病", "水腫"],
        "image_url": "https://placehold.co/400x300?text=冬瓜排骨湯",
        "nutrition": {
            "calories": 110,
            "protein": 10.0,
            "fat": 6.0,
            "carbs": 5.0,
            "sodium": 200,
            "fiber": 1.5,
            "sugar": 3.0
        },
        "price": 14000,  # 140元 in cents
        "category": "湯品",
        "available": True
    },
    {
        "id": 11,
        "name": "紫薯泥",
        "description": "軟糯紫薯泥,富含花青素和膳食纖維",
        "suitable_for": ["糖尿病", "便秘", "抗氧化"],
        "image_url": "https://placehold.co/400x300?text=紫薯泥",
        "nutrition": {
            "calories": 130,
            "protein": 2.5,
            "fat": 0.3,
            "carbs": 30.0,
            "sodium": 30,
            "fiber": 4.0,
            "sugar": 8.0
        },
        "price": 7000,  # 70元 in cents
        "category": "副食類",
        "available": True
    },
    {
        "id": 12,
        "name": "蓮子銀耳湯",
        "description": "養生甜湯,蓮子寧心安神,銀耳潤肺",
        "suitable_for": ["失眠", "便秘", "美容養顏"],
        "image_url": "https://placehold.co/400x300?text=蓮子銀耳湯",
        "nutrition": {
            "calories": 85,
            "protein": 2.0,
            "fat": 0.2,
            "carbs": 20.0,
            "sodium": 15,
            "fiber": 3.5,
            "sugar": 12.0
        },
        "price": 11000,  # 110元 in cents
        "category": "甜品類",
        "available": True
    },
    {
        "id": 13,
        "name": "蒸蛋",
        "description": "嫩滑蒸蛋,簡單易消化,營養豐富",
        "suitable_for": ["牙口不好", "營養不良", "清淡飲食"],
        "image_url": "https://placehold.co/400x300?text=蒸蛋",
        "nutrition": {
            "calories": 100,
            "protein": 8.0,
            "fat": 7.0,
            "carbs": 1.0,
            "sodium": 70,
            "fiber": 0.0,
            "sugar": 1.0
        },
        "price": 4000,  # 40元 in cents
        "category": "蛋白質類",
        "available": True
    },
    {
        "id": 14,
        "name": "木瓜牛奶",
        "description": "溫潤木瓜牛奶,有助消化,補充維生素",
        "suitable_for": ["便秘", "美容養顏", "清淡飲食"],
        "image_url": "https://placehold.co/400x300?text=木瓜牛奶",
        "nutrition": {
            "calories": 150,
            "protein": 8.0,
            "fat": 6.0,
            "carbs": 18.0,
            "sodium": 120,
            "fiber": 2.0,
            "sugar": 16.0
        },
        "price": 9000,  # 90元 in cents
        "category": "飲品類",
        "available": True
    },
    {
        "id": 15,
        "name": "豆腐腦",
        "description": "嫩滑豆腐腦,易吞嚥,富含植物蛋白",
        "suitable_for": ["牙口不好", "高血壓", "清淡飲食"],
        "image_url": "https://placehold.co/400x300?text=豆腐腦",
        "nutrition": {
            "calories": 80,
            "protein": 6.0,
            "fat": 4.0,
            "carbs": 4.0,
            "sodium": 160,
            "fiber": 1.0,
            "sugar": 2.0
        },
        "price": 3500,  # 35元 in cents
        "category": "蛋白質類",
        "available": True
    },
    {
        "id": 16,
        "name": "燕窩粥",
        "description": "滋補燕窩粥,美容養顏,滋陰潤燥",
        "suitable_for": ["糖尿病", "美容養顏", "營養不良"],
        "image_url": "https://placehold.co/400x300?text=燕窩粥",
        "nutrition": {
            "calories": 140,
            "protein": 6.0,
            "fat": 1.5,
            "carbs": 26.0,
            "sodium": 40,
            "fiber": 1.0,
            "sugar": 3.0
        },
        "price": 35000,  # 350元 in cents
        "category": "主食類",
        "available": True
    },
    {
        "id": 17,
        "name": "胡蘿蔔泥",
        "description": "軟嫩胡蘿蔔泥,富含維生素A,保護視力",
        "suitable_for": ["高血壓", "便秘", "護眼"],
        "image_url": "https://placehold.co/400x300?text=胡蘿蔔泥",
        "nutrition": {
            "calories": 60,
            "protein": 1.5,
            "fat": 0.3,
            "carbs": 14.0,
            "sodium": 80,
            "fiber": 3.5,
            "sugar": 7.0
        },
        "price": 4500,  # 45元 in cents
        "category": "副食類",
        "available": True
    },
    {
        "id": 18,
        "name": "綠豆湯",
        "description": "清熱解毒綠豆湯,消暑降火,利尿",
        "suitable_for": ["高血壓", "水腫", "清熱解毒"],
        "image_url": "https://placehold.co/400x300?text=綠豆湯",
        "nutrition": {
            "calories": 110,
            "protein": 7.0,
            "fat": 0.5,
            "carbs": 22.0,
            "sodium": 25,
            "fiber": 6.0,
            "sugar": 2.0
        },
        "price": 6500,  # 65元 in cents
        "category": "甜品類",
        "available": True
    },
    {
        "id": 19,
        "name": "雞肉粥",
        "description": "營養雞肉粥,溫和養胃,適合病後調養",
        "suitable_for": ["牙口不好", "營養不良", "腸胃虚弱"],
        "image_url": "https://placehold.co/400x300?text=雞肉粥",
        "nutrition": {
            "calories": 160,
            "protein": 12.0,
            "fat": 4.0,
            "carbs": 18.0,
            "sodium": 180,
            "fiber": 1.0,
            "sugar": 1.5
        },
        "price": 15000,  # 150元 in cents
        "category": "主食類",
        "available": True
    },
    {
        "id": 20,
        "name": "紫菜蛋花湯",
        "description": "清淡紫菜湯,富含碘質,有助甲狀腺健康",
        "suitable_for": ["高血壓", "糖尿病", "清淡飲食"],
        "image_url": "https://placehold.co/400x300?text=紫菜蛋花湯",
        "nutrition": {
            "calories": 40,
            "protein": 3.0,
            "fat": 2.0,
            "carbs": 2.0,
            "sodium": 200,
            "fiber": 1.0,
            "sugar": 1.0
        },
        "price": 5500,  # 55元 in cents
        "category": "湯品",
        "available": True
    }
]
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
def get_menu_items() -> List[Dict[str, Any]]:
    """Return every menu item (the shared MENU_ITEMS list itself, not a copy)."""
    return MENU_ITEMS
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
def get_menu_item_by_id(item_id: int) -> Dict[str, Any]:
    """Look up a single menu item by its numeric ID.

    Raises:
        ValueError: if no item carries the requested ID.
    """
    match = next((entry for entry in MENU_ITEMS if entry["id"] == item_id), None)
    if match is None:
        raise ValueError(f"Menu item with ID {item_id} not found")
    return match
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
def filter_menu_by_dietary_tag(tag: str) -> List[Dict[str, Any]]:
    """Return every menu item whose suitable_for tags include *tag*."""
    return [entry for entry in MENU_ITEMS if tag in entry["suitable_for"]]
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
def get_menu_items_by_category(category: str) -> List[Dict[str, Any]]:
    """Return every menu item belonging exactly to the given category."""
    return [entry for entry in MENU_ITEMS if entry["category"] == category]
|
models.py
ADDED
|
@@ -0,0 +1,254 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
SQLModel database models for Silver Table Assistant.
|
| 3 |
+
Defines the data structure for profiles, orders, donations, and menu items.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from datetime import datetime
|
| 7 |
+
from typing import Optional, Dict, Any, List
|
| 8 |
+
from uuid import UUID, uuid4
|
| 9 |
+
|
| 10 |
+
from sqlmodel import SQLModel, Field, Column, JSON
|
| 11 |
+
from sqlalchemy import Column as SQLColumn, DateTime
|
| 12 |
+
from sqlalchemy.dialects.postgresql import UUID as PostgresUUID
|
| 13 |
+
from sqlalchemy.sql import func
|
| 14 |
+
from enum import Enum
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class OrderStatus(str, Enum):
    """Lifecycle states of an order.

    Subclasses str so members compare equal to, and serialize as, their
    plain string values (e.g. stored in Order.status).
    """

    PENDING = "pending"      # default state for a newly created order
    PAID = "paid"            # payment confirmed
    COMPLETED = "completed"  # order fulfilled
    CANCELLED = "cancelled"  # order cancelled
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class DonationStatus(str, Enum):
    """Lifecycle states of a donation.

    Subclasses str so members compare equal to, and serialize as, their
    plain string values (e.g. stored in Donation.status).
    """

    PENDING = "pending"      # default state for a newly created donation
    COMPLETED = "completed"  # payment succeeded
    FAILED = "failed"        # payment failed
    CANCELLED = "cancelled"  # donation cancelled
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class Profile(SQLModel, table=True):
    """Senior profile model for storing health and dietary information."""

    # UUID primary key, generated client-side via uuid4.
    id: UUID = Field(
        default_factory=uuid4,
        sa_column=SQLColumn(PostgresUUID(as_uuid=True), primary_key=True)
    )
    # NOTE(review): indexed but has no foreign_key constraint (unlike
    # Order.profile_id) — presumably references an external auth user table;
    # verify against the auth provider's schema.
    user_id: UUID = Field(
        description="Reference to the owner user",
        index=True
    )
    name: str = Field(
        max_length=255,
        description="Senior's name"
    )
    # Age is range-validated at the model level (0–150 inclusive).
    age: int = Field(
        description="Senior's age",
        ge=0,
        le=150
    )
    gender: str = Field(
        max_length=20,
        description="Senior's gender (male/female)"
    )
    height: float = Field(
        description="Senior's height in cm"
    )
    weight: float = Field(
        description="Senior's weight in kg"
    )
    # Using JSON for lists to maintain simplicity across DBs
    chronic_diseases: List[str] = Field(
        default_factory=list,
        sa_column=SQLColumn(JSON)
    )
    dietary_restrictions: List[str] = Field(
        default_factory=list,
        sa_column=SQLColumn(JSON)
    )
    chewing_ability: str = Field(
        default="normal",
        max_length=50,
        description="normal, soft, or pureed"
    )
    avatar_url: Optional[str] = Field(
        default=None,
        max_length=500
    )
    # NOTE(review): default_factory=func.now yields a SQLAlchemy SQL
    # expression (not a datetime) when the model is instantiated in Python,
    # while updated_at uses datetime.utcnow — confirm the inconsistency is
    # intentional; the server_default covers the database side either way.
    created_at: datetime = Field(
        default_factory=func.now,
        sa_column=SQLColumn(DateTime(timezone=True), server_default=func.now())
    )
    # Refreshed on every UPDATE via onupdate=func.now().
    # NOTE(review): datetime.utcnow is naive while the column is
    # timezone-aware — confirm timezone handling.
    updated_at: datetime = Field(
        default_factory=datetime.utcnow,
        sa_column=SQLColumn(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
    )
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class Order(SQLModel, table=True):
    """Order model for storing food orders and payment information."""

    # UUID primary key, generated client-side via uuid4.
    id: Optional[UUID] = Field(
        default_factory=uuid4,
        sa_column=SQLColumn(PostgresUUID(as_uuid=True), primary_key=True)
    )
    # Foreign key to Profile.id; indexed for per-profile order lookups.
    profile_id: UUID = Field(
        foreign_key="profile.id",
        description="Reference to the profile that made this order",
        index=True
    )
    # Free-form JSON payload; schema is defined by the ordering code path.
    items: Dict[str, Any] = Field(
        sa_column=Column(JSON),
        description="JSON object containing order items with quantities and details"
    )
    # Monetary amounts are stored as integer cents throughout the project.
    total_amount: int = Field(
        description="Total order amount in cents",
        ge=0
    )
    # Stored as a plain string; valid values mirror the OrderStatus enum.
    status: str = Field(
        default=OrderStatus.PENDING.value,
        description="Order status: pending, paid, completed, cancelled",
        max_length=50
    )
    # Null until a Stripe checkout session has been created for this order.
    stripe_session_id: Optional[str] = Field(
        default=None,
        description="Stripe checkout session ID for payment tracking",
        max_length=1000
    )
    # NOTE(review): default_factory=func.now yields a SQL expression at
    # Python instantiation time while updated_at uses datetime.utcnow —
    # confirm which is intended (same pattern as Profile).
    created_at: datetime = Field(
        default_factory=func.now,
        sa_column=SQLColumn(DateTime(timezone=True), server_default=func.now())
    )
    # Refreshed on every UPDATE via onupdate=func.now().
    updated_at: datetime = Field(
        default_factory=datetime.utcnow,
        sa_column=SQLColumn(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
    )
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
class Donation(SQLModel, table=True):
    """Donation model for storing donation information and payment details."""

    # UUID primary key, generated client-side via uuid4.
    id: Optional[UUID] = Field(
        default_factory=uuid4,
        sa_column=SQLColumn(PostgresUUID(as_uuid=True), primary_key=True)
    )
    # Nullable: anonymous (unauthenticated) donations carry no user.
    # NOTE(review): indexed but no foreign_key constraint — same pattern as
    # Profile.user_id; verify against the auth schema.
    user_id: Optional[UUID] = Field(
        default=None,
        description="Optional reference to authenticated user",
        index=True
    )
    donor_name: Optional[str] = Field(
        default=None,
        description="Donor's name (optional for anonymous donations)",
        max_length=255
    )
    # Integer cents; ge=1 forbids zero/negative donations.
    amount: int = Field(
        description="Donation amount in cents",
        ge=1
    )
    # Stored as a plain string; valid values mirror the DonationStatus enum.
    status: str = Field(
        default=DonationStatus.PENDING.value,
        description="Donation status: pending, completed, failed, cancelled",
        max_length=50
    )
    # Null until a Stripe checkout session has been created.
    stripe_session_id: Optional[str] = Field(
        default=None,
        description="Stripe checkout session ID for payment tracking",
        max_length=1000
    )
    # NOTE(review): default_factory=func.now yields a SQL expression at
    # Python instantiation time while updated_at uses datetime.utcnow —
    # same inconsistency as Profile/Order; confirm intent.
    created_at: datetime = Field(
        default_factory=func.now,
        sa_column=SQLColumn(DateTime(timezone=True), server_default=func.now())
    )
    # Refreshed on every UPDATE via onupdate=func.now().
    updated_at: datetime = Field(
        default_factory=datetime.utcnow,
        sa_column=SQLColumn(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
    )
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
class MenuItem(SQLModel, table=True):
    """Menu item model for storing available food items and their nutritional information."""

    # Auto-incrementing integer key (unlike the UUID keys on other tables),
    # matching the integer ids used by the static MENU_ITEMS seed data.
    id: Optional[int] = Field(
        default=None,
        primary_key=True,
        description="Menu item ID"
    )
    name: str = Field(
        max_length=255,
        description="Name of the menu item"
    )
    description: Optional[str] = Field(
        default=None,
        description="Detailed description of the menu item",
        max_length=1000
    )
    # NOTE(review): this file mixes sqlmodel's Column and the SQLColumn alias
    # of sqlalchemy.Column for sa_column — presumably equivalent; consider
    # unifying on one alias.
    suitable_for: List[str] = Field(
        default_factory=list,
        sa_column=Column(JSON),
        description="List of suitable health conditions (e.g., ['高血壓', '牙口不好'])"
    )
    image_url: Optional[str] = Field(
        default=None,
        description="URL to the menu item image",
        max_length=500
    )
    nutrition: Dict[str, Any] = Field(
        sa_column=Column(JSON),
        description="JSON object containing nutritional information (calories, protein, carbs, etc.)"
    )
    # Integer cents, consistent with Order.total_amount and Donation.amount.
    price: int = Field(
        description="Price of the menu item in cents",
        ge=0
    )
    category: Optional[str] = Field(
        default=None,
        description="Food category (e.g., 蛋白質類, 葷食類)",
        max_length=100
    )
    # Soft availability flag — unavailable items stay in the table.
    available: bool = Field(
        default=True,
        description="Whether this menu item is currently available"
    )
    # NOTE(review): default_factory=func.now yields a SQL expression at
    # Python instantiation time — confirm intent (same pattern as Profile).
    created_at: datetime = Field(
        default_factory=func.now,
        sa_column=SQLColumn(DateTime(timezone=True), server_default=func.now())
    )
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
# Chat conversation model for storing AI chat history
class ChatConversation(SQLModel, table=True):
    """Chat conversation model for storing AI chat history with users.

    Each row stores one user message together with the assistant's reply (if
    already generated) and optional structured metadata.
    """

    # Auto-incrementing integer key (unlike the UUID keys on other tables).
    id: Optional[int] = Field(
        default=None,
        primary_key=True,
        description="Chat conversation ID"
    )
    # Optional link to the senior profile the conversation concerns.
    profile_id: Optional[UUID] = Field(
        default=None,
        foreign_key="profile.id",
        description="Reference to the profile",
        index=True
    )
    message: str = Field(
        description="User's message",
        max_length=5000
    )
    # Nullable so the user message can be persisted before the AI responds.
    response: Optional[str] = Field(
        default=None,
        description="AI assistant's response",
        max_length=5000
    )
    # BUGFIX: default_factory=dict instead of default={} — a literal {} is a
    # single shared mutable object, so in-place mutations could leak between
    # model instances; a factory gives every instance its own dict.
    meta_data: Optional[Dict[str, Any]] = Field(
        default_factory=dict,
        sa_column=Column(JSON),
        description="Additional metadata (e.g., context, recommendations)"
    )
    # NOTE(review): default_factory=func.now yields a SQL expression at
    # Python instantiation time — confirm intent (same pattern as Profile).
    created_at: datetime = Field(
        default_factory=func.now,
        sa_column=SQLColumn(DateTime(timezone=True), server_default=func.now())
    )
|
rag.py
ADDED
|
@@ -0,0 +1,469 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
RAG (Retrieval-Augmented Generation) service for Silver Table Assistant.
|
| 3 |
+
Handles document loading, vector storage, and similarity search using Supabase vector store.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import logging
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from typing import List, Optional, Dict, Any
|
| 10 |
+
from uuid import uuid4
|
| 11 |
+
|
| 12 |
+
import asyncio
|
| 13 |
+
from langchain_openai import OpenAIEmbeddings
|
| 14 |
+
from langchain_community.document_loaders import PyPDFLoader, UnstructuredMarkdownLoader
|
| 15 |
+
from langchain_community.vectorstores import SupabaseVectorStore
|
| 16 |
+
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
| 17 |
+
from supabase import create_client, Client
|
| 18 |
+
from langchain_core.documents import Document
|
| 19 |
+
from cache import DocumentCache, document_cache, cache_result
|
| 20 |
+
|
| 21 |
+
# Configure logging
|
| 22 |
+
logging.basicConfig(level=logging.INFO)
|
| 23 |
+
logger = logging.getLogger(__name__)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class RAGService:
    """RAG service for document management and similarity search.

    Embeds text through an OpenAI-compatible endpoint (optionally a LiteLLM
    proxy) and stores/queries vectors in the Supabase ``documents`` table via
    the ``match_documents`` RPC.
    """

    # Error-message fragments produced by a known supabase-py / langchain
    # SupabaseVectorStore version incompatibility. When one of these appears
    # we fall back to calling the match_documents RPC manually.
    _COMPAT_ERROR_MARKERS = (
        "'SyncRPCFilterRequestBuilder' object has no attribute 'params'",
        "'AsyncRPCFilterRequestBuilder' object has no attribute 'params'",
    )

    def __init__(self):
        """Initialize embeddings, Supabase client, vector store and text splitter.

        Raises:
            ValueError: If SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY, or an API
                key (OPENAI_API_KEY / LITELLM_API_KEY) is not configured.
        """
        # Credentials come from the environment only.
        # SECURITY: an earlier revision shipped a hard-coded fallback API key
        # in this file; that key is committed to history, must be considered
        # leaked, and should be rotated.
        self.supabase_url = os.getenv("SUPABASE_URL")
        self.supabase_service_key = os.getenv("SUPABASE_SERVICE_ROLE_KEY")
        self.openai_api_key = os.getenv("OPENAI_API_KEY") or os.getenv("LITELLM_API_KEY")
        # NOTE(review): the base-URL default points at internal infrastructure;
        # consider moving it to deployment config as well.
        self.openai_base_url = os.getenv("OPENAI_BASE_URL") or os.getenv(
            "LITELLM_BASE_URL", "https://litellm-ekkks8gsocw.dgx-coolify.apmic.ai/"
        )

        if not all([self.supabase_url, self.supabase_service_key, self.openai_api_key]):
            raise ValueError("Missing required environment variables: SUPABASE_URL, SUPABASE_SERVICE_ROLE_KEY, OPENAI_API_KEY or LITELLM_API_KEY")

        # Initialize OpenAI embeddings (works with LiteLLM-compatible endpoints).
        embed_kwargs = {
            "model": "azure-text-embedding-3-large",
            "openai_api_key": self.openai_api_key,
        }
        if self.openai_base_url:
            embed_kwargs["openai_api_base"] = self.openai_base_url

        self.embeddings = OpenAIEmbeddings(**embed_kwargs)
        logger.info(f"Initialized OpenAIEmbeddings with base_url: {self.openai_base_url}")

        # Supabase client (service-role key: full access, server-side only).
        self.supabase_client: Client = create_client(
            self.supabase_url,
            self.supabase_service_key
        )

        # Vector store backed by the "documents" table / "match_documents" RPC.
        self.vector_store = SupabaseVectorStore(
            client=self.supabase_client,
            embedding=self.embeddings,
            table_name="documents",
            query_name="match_documents"
        )

        # Splitter for chunking loaded documents before embedding.
        self.text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=1000,
            chunk_overlap=200,
            length_function=len,
            is_separator_regex=False,
        )

    def _is_vectorstore_compat_error(self, exc: Exception) -> bool:
        """Return True if *exc* is the known SupabaseVectorStore incompatibility."""
        message = str(exc)
        return any(marker in message for marker in self._COMPAT_ERROR_MARKERS)

    async def _match_documents_rpc(self, query: str, match_threshold: float, match_count: int) -> List[Document]:
        """Call the Supabase ``match_documents`` RPC directly.

        Fallback path used when the langchain SupabaseVectorStore wrapper is
        incompatible with the installed supabase client version.

        Args:
            query: Search query to embed.
            match_threshold: Minimum similarity score passed to the RPC.
            match_count: Maximum number of rows to return.

        Returns:
            List of Document objects built from the RPC rows.
        """
        embedding = await self.embeddings.aembed_query(query)
        res = self.supabase_client.rpc(
            "match_documents",
            {
                "query_embedding": embedding,
                "match_threshold": match_threshold,
                "match_count": match_count,
            },
        ).execute()
        return [
            Document(page_content=row["content"], metadata=row["metadata"])
            for row in res.data
        ]

    async def load_knowledge_base(self, data_dir: str = "backend/data") -> Dict[str, Any]:
        """
        Load and process documents from the data directory.

        Args:
            data_dir: Path to directory containing documents

        Returns:
            Dictionary with processing results (file and chunk counts, errors)

        Raises:
            ValueError: If *data_dir* does not exist.
        """
        logger.info(f"Loading knowledge base from {data_dir}")

        data_path = Path(data_dir)
        if not data_path.exists():
            raise ValueError(f"Data directory {data_dir} does not exist")

        # Track processing results across all files.
        results = {
            "total_files": 0,
            "processed_files": 0,
            "failed_files": 0,
            "total_chunks": 0,
            "errors": []
        }

        # Recursively collect all PDF and Markdown files.
        all_files = list(data_path.glob("**/*.pdf")) + list(data_path.glob("**/*.md"))
        results["total_files"] = len(all_files)

        if not all_files:
            logger.warning(f"No PDF or MD files found in {data_dir}")
            return results

        logger.info(f"Found {len(all_files)} files to process")

        # Process each file independently so one failure does not abort the rest.
        for file_path in all_files:
            try:
                await self._process_file(file_path, results)
            except Exception as e:
                error_msg = f"Failed to process {file_path}: {str(e)}"
                logger.error(error_msg)
                results["errors"].append(error_msg)
                results["failed_files"] += 1

        logger.info(f"Knowledge base loading completed: {results}")
        return results

    async def _process_file(self, file_path: Path, results: Dict[str, Any]) -> None:
        """
        Process a single file and add its chunks to the vector store.

        Args:
            file_path: Path to the file (.pdf or .md)
            results: Mutable results dictionary to update in place

        Raises:
            ValueError: If the file extension is neither .pdf nor .md.
        """
        logger.info(f"Processing file: {file_path}")

        # Load the document with a loader appropriate for its file type.
        suffix = file_path.suffix.lower()
        if suffix == ".pdf":
            documents = PyPDFLoader(str(file_path)).load()
        elif suffix == ".md":
            # Try UnstructuredMarkdownLoader first; it needs optional system
            # dependencies, so fall back to reading the file as plain text.
            try:
                documents = UnstructuredMarkdownLoader(str(file_path)).load()
            except Exception:
                with open(file_path, 'r', encoding='utf-8') as f:
                    content = f.read()
                documents = [Document(page_content=content, metadata={"source": str(file_path)})]
        else:
            raise ValueError(f"Unsupported file type: {file_path.suffix}")

        # Split documents into overlapping chunks for embedding.
        chunks = self.text_splitter.split_documents(documents)

        # Attach provenance metadata to every chunk.
        for chunk in chunks:
            chunk.metadata["source"] = str(file_path)
            chunk.metadata["file_name"] = file_path.name
            chunk.metadata["chunk_id"] = str(uuid4())

        if chunks:
            await self.vector_store.aadd_documents(chunks)
            results["processed_files"] += 1
            results["total_chunks"] += len(chunks)
            logger.info(f"Added {len(chunks)} chunks from {file_path}")
        else:
            logger.warning(f"No chunks generated from {file_path}")

    @cache_result(document_cache, "rag_documents", ttl=1800)
    async def get_relevant_documents(self, query: str, k: int = 8) -> List[Document]:
        """
        Perform similarity search to find relevant documents with caching.

        Args:
            query: Search query
            k: Number of documents to return (default: 8)

        Returns:
            List of relevant Document objects (empty list on error)
        """
        logger.info(f"Searching for relevant documents with query: '{query}' (k={k})")

        try:
            # Manual cache check, kept in addition to the decorator for
            # backward compatibility with the DocumentCache key scheme.
            cached_results = DocumentCache.get_relevant_documents(query, k)
            if cached_results is not None:
                logger.info(f"Returning cached results for query: '{query}'")
                return cached_results

            try:
                results = await self.vector_store.asimilarity_search(query, k=k)
            except Exception as e:
                if not self._is_vectorstore_compat_error(e):
                    raise
                logger.warning(f"SupabaseVectorStore incompatibility detected, using manual RPC: {str(e)}")
                # 0.1 is a permissive floor so the unscored search behaves
                # like plain top-k retrieval.
                results = await self._match_documents_rpc(query, match_threshold=0.1, match_count=k)

            # Cache the results for subsequent identical queries.
            DocumentCache.set_relevant_documents(query, k, results)

            logger.info(f"Found {len(results)} relevant documents")
            return results
        except Exception as e:
            logger.error(f"Error during similarity search: {str(e)}")
            return []

    @cache_result(document_cache, "rag_documents_scored", ttl=1800)
    async def get_relevant_documents_with_scores(self, query: str, k: int = 8, score_threshold: float = 0.7) -> List[Document]:
        """
        Perform similarity search filtered by a minimum similarity score.

        Args:
            query: Search query
            k: Number of documents to return
            score_threshold: Minimum similarity score

        Returns:
            List of relevant Document objects above the threshold
            (empty list on error)
        """
        logger.info(f"Searching for relevant documents with query: '{query}' (k={k}, threshold={score_threshold})")

        try:
            # Manual cache check (see get_relevant_documents).
            cached_results = DocumentCache.get_relevant_documents(query, k, score_threshold)
            if cached_results is not None:
                logger.info(f"Returning cached scored results for query: '{query}'")
                return cached_results

            try:
                # Over-fetch (k*2) so that score filtering can still yield k hits.
                scored = await self.vector_store.asimilarity_search_with_score(query, k=k*2)
                filtered_results = [doc for doc, score in scored if score >= score_threshold][:k]
            except Exception as e:
                if not self._is_vectorstore_compat_error(e):
                    raise
                logger.warning(f"SupabaseVectorStore incompatibility detected in scored search, using manual RPC: {str(e)}")
                # The RPC applies the threshold server-side.
                filtered_results = await self._match_documents_rpc(query, match_threshold=score_threshold, match_count=k)

            # Cache the filtered results.
            DocumentCache.set_relevant_documents(query, k, filtered_results, score_threshold)

            logger.info(f"Found {len(filtered_results)} relevant documents above threshold")
            return filtered_results
        except Exception as e:
            logger.error(f"Error during similarity search with scores: {str(e)}")
            return []

    async def get_relevant_documents_paginated(
        self,
        query: str,
        page: int = 1,
        page_size: int = 10,
        score_threshold: Optional[float] = None
    ) -> Dict[str, Any]:
        """
        Perform paginated similarity search.

        Args:
            query: Search query
            page: Page number (1-indexed)
            page_size: Number of documents per page
            score_threshold: Minimum similarity score (optional)

        Returns:
            Dictionary with documents, pagination info, and metadata.
            On failure, an empty page plus an "error" key.
        """
        logger.info(f"Paginated search for query: '{query}' (page={page}, page_size={page_size})")

        try:
            # Fetch enough results to cover every page up to the requested one.
            total_needed = page * page_size

            # BUGFIX: was `if score_threshold:` which silently ignored an
            # explicit threshold of 0.0; compare against None instead.
            if score_threshold is not None:
                results = await self.get_relevant_documents_with_scores(query, k=total_needed, score_threshold=score_threshold)
            else:
                results = await self.get_relevant_documents(query, k=total_needed)

            # Slice out the requested page.
            start_idx = (page - 1) * page_size
            end_idx = start_idx + page_size
            paginated_results = results[start_idx:end_idx]

            # Pagination metadata (ceiling division for page count).
            total_results = len(results)
            total_pages = (total_results + page_size - 1) // page_size
            return {
                "documents": paginated_results,
                "pagination": {
                    "page": page,
                    "page_size": page_size,
                    "total_results": total_results,
                    "total_pages": total_pages,
                    "has_next": page < total_pages,
                    "has_prev": page > 1,
                    "start_index": start_idx,
                    "end_index": end_idx
                },
                "query": query,
                "score_threshold": score_threshold
            }

        except Exception as e:
            logger.error(f"Error during paginated search: {str(e)}")
            return {
                "documents": [],
                "pagination": {
                    "page": page,
                    "page_size": page_size,
                    "total_results": 0,
                    "total_pages": 0,
                    "has_next": False,
                    "has_prev": False,
                    "start_index": 0,
                    "end_index": 0
                },
                "query": query,
                "score_threshold": score_threshold,
                "error": str(e)
            }

    async def get_document_count(self) -> int:
        """
        Get the total number of documents in the vector store.

        Returns:
            Total number of rows in the "documents" table (0 on error)
        """
        try:
            # Exact count without transferring row data.
            result = self.supabase_client.table("documents").select("id", count="exact").execute()
            return result.count if result.count else 0
        except Exception as e:
            logger.error(f"Error getting document count: {str(e)}")
            return 0

    async def clear_knowledge_base(self) -> bool:
        """
        Clear all documents from the vector store.

        Returns:
            True if successful, False otherwise
        """
        try:
            # Supabase requires a filter on delete; .gte("id", "") is a
            # catch-all filter — presumably ids are text/uuid so every row
            # compares >= "" (TODO confirm against the table schema).
            self.supabase_client.table("documents").delete().gte("id", "").execute()
            logger.info("Knowledge base cleared successfully")
            return True
        except Exception as e:
            logger.error(f"Error clearing knowledge base: {str(e)}")
            return False
| 391 |
+
|
| 392 |
+
|
| 393 |
+
# Global RAG service instance
|
| 394 |
+
rag_service: Optional[RAGService] = None
|
| 395 |
+
|
| 396 |
+
|
| 397 |
+
def get_rag_service() -> RAGService:
    """Return the process-wide RAGService, constructing it on first use.

    Returns:
        The lazily-created singleton RAGService instance.
    """
    global rag_service
    if rag_service is not None:
        return rag_service
    rag_service = RAGService()
    return rag_service
|
| 408 |
+
|
| 409 |
+
|
| 410 |
+
# Convenience functions for backward compatibility
|
| 411 |
+
async def load_knowledge_base(data_dir: str = "backend/data") -> Dict[str, Any]:
    """Backward-compatible shim: load the knowledge base via the shared service."""
    return await get_rag_service().load_knowledge_base(data_dir)
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
async def get_relevant_documents(query: str, k: int = 8) -> List[Document]:
    """Backward-compatible shim: top-k similarity search via the shared service."""
    return await get_rag_service().get_relevant_documents(query, k)
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
async def get_relevant_documents_with_scores(query: str, k: int = 8, score_threshold: float = 0.7) -> List[Document]:
    """Backward-compatible shim: score-filtered search via the shared service."""
    return await get_rag_service().get_relevant_documents_with_scores(query, k, score_threshold)
|
| 427 |
+
|
| 428 |
+
|
| 429 |
+
if __name__ == "__main__":
|
| 430 |
+
"""
|
| 431 |
+
Main block for testing and manual knowledge base loading.
|
| 432 |
+
"""
|
| 433 |
+
async def main():
|
| 434 |
+
"""Main function for testing."""
|
| 435 |
+
print("Loading knowledge base...")
|
| 436 |
+
|
| 437 |
+
try:
|
| 438 |
+
# Initialize and load knowledge base
|
| 439 |
+
service = get_rag_service()
|
| 440 |
+
results = await service.load_knowledge_base()
|
| 441 |
+
|
| 442 |
+
print(f"Knowledge base loading results:")
|
| 443 |
+
print(f"- Total files: {results['total_files']}")
|
| 444 |
+
print(f"- Processed files: {results['processed_files']}")
|
| 445 |
+
print(f"- Failed files: {results['failed_files']}")
|
| 446 |
+
print(f"- Total chunks: {results['total_chunks']}")
|
| 447 |
+
|
| 448 |
+
if results['errors']:
|
| 449 |
+
print(f"- Errors: {len(results['errors'])}")
|
| 450 |
+
for error in results['errors']:
|
| 451 |
+
print(f" * {error}")
|
| 452 |
+
|
| 453 |
+
# Test search functionality
|
| 454 |
+
test_query = "高血壓飲食建議"
|
| 455 |
+
print(f"\nTesting search with query: '{test_query}'")
|
| 456 |
+
documents = await service.get_relevant_documents(test_query)
|
| 457 |
+
|
| 458 |
+
print(f"Found {len(documents)} relevant documents:")
|
| 459 |
+
for i, doc in enumerate(documents, 1):
|
| 460 |
+
print(f"{i}. {doc.metadata.get('file_name', 'Unknown')} - {doc.page_content[:100]}...")
|
| 461 |
+
|
| 462 |
+
print(f"\nTotal documents in vector store: {await service.get_document_count()}")
|
| 463 |
+
|
| 464 |
+
except Exception as e:
|
| 465 |
+
print(f"Error: {str(e)}")
|
| 466 |
+
raise
|
| 467 |
+
|
| 468 |
+
# Run the main function
|
| 469 |
+
asyncio.run(main())
|
requirements.txt
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Web Framework
|
| 2 |
+
fastapi
|
| 3 |
+
uvicorn[standard]
|
| 4 |
+
gradio
|
| 5 |
+
|
| 6 |
+
# Database
|
| 7 |
+
sqlalchemy
|
| 8 |
+
sqlmodel
|
| 9 |
+
asyncpg
|
| 10 |
+
pgvector
|
| 11 |
+
|
| 12 |
+
# AI/ML
|
| 13 |
+
langchain
|
| 14 |
+
langchain-openai
|
| 15 |
+
langchain-community
|
| 16 |
+
tiktoken
|
| 17 |
+
|
| 18 |
+
# External Services
|
| 19 |
+
supabase
|
| 20 |
+
stripe
|
| 21 |
+
httpx
|
| 22 |
+
|
| 23 |
+
# Utilities
|
| 24 |
+
python-dotenv
|
| 25 |
+
pypdf
|
| 26 |
+
|
| 27 |
+
# Security and Validation
|
| 28 |
+
PyJWT
|
| 29 |
+
cryptography
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
# Development (optional)
|
| 33 |
+
# pytest==7.4.3
|
| 34 |
+
# pytest-asyncio==0.21.1
|
| 35 |
+
# greenlet==3.3.0
|
schemas.py
ADDED
|
@@ -0,0 +1,275 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Pydantic schemas for request/response models in Silver Table Assistant API.
|
| 3 |
+
Provides data validation and serialization for all API endpoints.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
from datetime import datetime
|
| 7 |
+
from typing import Optional, Dict, Any, List
|
| 8 |
+
from uuid import UUID
|
| 9 |
+
|
| 10 |
+
from pydantic import BaseModel, Field, validator
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
# ========== Chat Schemas ==========
|
| 14 |
+
|
| 15 |
+
class ChatRequest(BaseModel):
    """Schema for chat request messages."""

    # User text is capped at 2000 chars and may not be empty.
    message: str = Field(..., min_length=1, max_length=2000, description="User's message to the assistant")
    # When set, responses are personalized to this senior's profile.
    profile_id: Optional[UUID] = Field(default=None, description="Profile ID for personalized responses")
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class ChatResponse(BaseModel):
    """Schema for chat response messages."""

    message: str = Field(..., description="Assistant's response message")
    # Identifier the client echoes back to continue the same conversation.
    session_id: str = Field(..., description="Chat session identifier")
    recommendations: Optional[List[Dict[str, Any]]] = Field(default=None, description="Food recommendations based on chat")
    metadata: Optional[Dict[str, Any]] = Field(default=None, description="Additional response metadata")
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# ========== Profile Schemas ==========
|
| 32 |
+
|
| 33 |
+
class ProfileBase(BaseModel):
    """Base profile schema with common fields."""

    name: str = Field(..., min_length=1, max_length=255, description="Senior's name")
    age: int = Field(..., ge=0, le=120, description="Senior's age")
    # NOTE(review): documented as male/female but not enforced — confirm
    # whether a Literal/validator is wanted here.
    gender: str = Field(..., description="Senior's gender (male/female)")
    height: float = Field(..., ge=50, le=250, description="Senior's height in cm")
    weight: float = Field(..., ge=20, le=200, description="Senior's weight in kg")
    chronic_diseases: List[str] = Field(default_factory=list, description="List of chronic diseases")
    dietary_restrictions: List[str] = Field(default_factory=list, description="List of dietary restrictions")
    # Free-form string; expected values listed in the description.
    chewing_ability: str = Field(default="normal", description="normal, soft, or pureed")
    avatar_url: Optional[str] = Field(default=None, max_length=500, description="Avatar image URL")
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class ProfileCreate(ProfileBase):
    """Schema for creating a new profile."""
    # Optional client-supplied id; presumably used for upsert-style updates —
    # confirm against the CRUD layer.
    id: Optional[UUID] = Field(default=None, description="Optional Profile ID for updates")
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class ProfileUpdate(BaseModel):
    """Schema for updating an existing profile.

    Every field is optional; only the fields supplied are updated.
    Constraints mirror ProfileBase.
    """

    id: Optional[UUID] = Field(default=None)
    name: Optional[str] = Field(default=None, min_length=1, max_length=255)
    age: Optional[int] = Field(default=None, ge=0, le=120)
    gender: Optional[str] = Field(default=None)
    height: Optional[float] = Field(default=None, ge=50, le=250)
    weight: Optional[float] = Field(default=None, ge=20, le=200)
    chronic_diseases: Optional[List[str]] = Field(default=None)
    dietary_restrictions: Optional[List[str]] = Field(default=None)
    chewing_ability: Optional[str] = Field(default=None)
    avatar_url: Optional[str] = Field(default=None, max_length=500)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class ProfileRead(ProfileBase):
    """Schema for reading profile data (server-assigned fields included)."""

    id: UUID
    user_id: UUID
    created_at: datetime
    updated_at: datetime

    class Config:
        # Allow construction from ORM objects via attribute access.
        from_attributes = True
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
# ========== Menu Item Schemas ==========
|
| 80 |
+
|
| 81 |
+
class MenuItemBase(BaseModel):
    """Base menu item schema with common fields."""

    name: str = Field(..., min_length=1, max_length=255, description="Name of the menu item")
    description: Optional[str] = Field(default=None, max_length=1000, description="Item description")
    suitable_for: List[str] = Field(default_factory=list, description="Suitable for health conditions")
    image_url: Optional[str] = Field(default=None, max_length=500, description="Image URL")
    # Free-form nutrition dict; schema not enforced here — keys presumably
    # match what menu_data.py produces (confirm).
    nutrition: Dict[str, Any] = Field(..., description="Nutritional information")
    # Integer minor units (cents) to avoid floating-point money.
    price: int = Field(..., ge=0, description="Price in cents")
    category: Optional[str] = Field(default=None, max_length=100, description="Food category")
    available: bool = Field(default=True, description="Availability status")
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class MenuItemCreate(MenuItemBase):
    """Schema for creating a new menu item."""
    # Identical to MenuItemBase; exists so the create API has its own type.
    pass
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class MenuItemUpdate(BaseModel):
    """Schema for updating an existing menu item.

    All fields optional (partial update); constraints mirror MenuItemBase.
    """

    name: Optional[str] = Field(default=None, min_length=1, max_length=255)
    description: Optional[str] = Field(default=None, max_length=1000)
    suitable_for: Optional[List[str]] = Field(default=None)
    image_url: Optional[str] = Field(default=None, max_length=500)
    nutrition: Optional[Dict[str, Any]] = Field(default=None)
    price: Optional[int] = Field(default=None, ge=0)
    category: Optional[str] = Field(default=None, max_length=100)
    available: Optional[bool] = Field(default=None)
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class MenuItemRead(MenuItemBase):
    """Schema for reading menu item data (server-assigned fields included)."""

    # Menu items use an integer primary key, unlike the UUID-keyed models.
    id: int
    created_at: datetime

    class Config:
        # Allow construction from ORM objects via attribute access.
        from_attributes = True
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
# ========== Order Schemas ==========
|
| 123 |
+
|
| 124 |
+
class OrderItem(BaseModel):
    """Schema for individual order items (a line within an order)."""

    menu_item_id: int
    quantity: int = Field(..., gt=0, description="Quantity of this item")
    special_instructions: Optional[str] = Field(default=None, max_length=500, description="Special preparation instructions")
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class OrderBase(BaseModel):
    """Base order schema with common fields."""

    # NOTE(review): `min_items` is the Pydantic v1 keyword (renamed
    # `min_length` in v2) — confirm against the pinned pydantic version.
    items: List[OrderItem] = Field(..., min_items=1, description="List of order items")
    # Client-supplied total in cents; presumably re-validated server-side
    # against item prices — confirm in the order CRUD/stripe flow.
    total_amount: int = Field(..., ge=0, description="Total amount in cents")
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
class OrderCreate(OrderBase):
    """Schema for creating a new order."""

    profile_id: UUID = Field(..., description="ID of the profile making the order")
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
class OrderUpdate(BaseModel):
    """Schema for updating an existing order (partial update)."""

    # Free-form status string; allowed values not enforced at this layer.
    status: Optional[str] = Field(default=None, description="Order status")
    items: Optional[List[OrderItem]] = Field(default=None, min_items=1)
    total_amount: Optional[int] = Field(default=None, ge=0)
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
class OrderRead(OrderBase):
    """Schema for reading order data (server-assigned and payment fields)."""

    id: UUID
    profile_id: UUID
    status: str
    # Stripe checkout linkage; None until a checkout session is created.
    stripe_session_id: Optional[str]
    checkout_url: Optional[str] = None
    created_at: datetime
    updated_at: datetime

    class Config:
        # Allow construction from ORM objects via attribute access.
        from_attributes = True
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
# ========== Donation Schemas ==========
|
| 169 |
+
|
| 170 |
+
class DonationBase(BaseModel):
    """Base donation schema with common fields."""

    # Integer minor units (cents); must be strictly positive.
    amount: int = Field(..., gt=0, description="Donation amount in cents")
    # Optional — anonymous donations are allowed.
    donor_name: Optional[str] = Field(default=None, max_length=255, description="Donor's name")
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
class DonationCreate(DonationBase):
    """Schema for creating a new donation."""
    # Set when the donor is logged in; None for anonymous donations.
    user_id: Optional[UUID] = Field(default=None, description="Optional authenticated user ID")
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
class DonationUpdate(BaseModel):
    """Schema for updating an existing donation (status only)."""

    status: Optional[str] = Field(default=None, description="Donation status")
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
class DonationRead(DonationBase):
    """Schema for reading donation data (server-assigned and payment fields)."""

    id: UUID
    user_id: Optional[UUID] = None
    status: str
    # Stripe checkout linkage; None until a checkout session is created.
    stripe_session_id: Optional[str]
    checkout_url: Optional[str] = None
    created_at: datetime
    updated_at: datetime

    class Config:
        # Allow construction from ORM objects via attribute access.
        from_attributes = True
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
# ========== Chat Conversation Schemas ==========
|
| 204 |
+
|
| 205 |
+
class ChatConversationBase(BaseModel):
    """Base chat conversation schema with common fields."""

    message: str = Field(..., min_length=1, max_length=5000, description="User's message")
    response: Optional[str] = Field(default=None, min_length=1, max_length=5000, description="AI assistant's response")
    # Use default_factory instead of a shared mutable `default={}` literal —
    # the idiomatic, lint-clean way to give each instance its own dict.
    metadata: Optional[Dict[str, Any]] = Field(default_factory=dict, description="Additional metadata")
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
class ChatConversationCreate(ChatConversationBase):
    """Schema for creating a new chat conversation."""

    # Conversation rows are owned by a senior profile, not the account user.
    profile_id: UUID = Field(..., description="Profile ID")
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
class ChatConversationRead(ChatConversationBase):
    """Schema for reading chat conversation data."""

    id: int
    profile_id: UUID
    created_at: datetime

    class Config:
        # Allow construction from ORM objects via attribute access.
        from_attributes = True
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
# ========== API Response Schemas ==========
|
| 231 |
+
|
| 232 |
+
class APIResponse(BaseModel):
    """Generic API response schema (success flag + message + payload)."""

    success: bool = Field(..., description="Whether the operation was successful")
    message: str = Field(..., description="Response message")
    data: Optional[Any] = Field(default=None, description="Response data")
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
class PaginatedResponse(BaseModel):
    """Generic paginated response schema.

    Carries one page of results plus the pagination bookkeeping needed by
    clients to render page controls.
    """

    items: List[Any] = Field(..., description="List of items")
    total: int = Field(..., description="Total number of items")
    page: int = Field(..., description="Current page number")
    per_page: int = Field(..., description="Items per page")
    pages: int = Field(..., description="Total number of pages")
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
# ========== Health Check Schema ==========
|
| 251 |
+
|
| 252 |
+
class HealthCheck(BaseModel):
    """Schema for health check endpoint.

    Snapshot of service liveness returned by the health route.
    """

    status: str = Field(..., description="Service status")
    timestamp: datetime = Field(..., description="Check timestamp")
    version: str = Field(..., description="API version")
    database: str = Field(..., description="Database connection status")
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
# ========== Stripe Payment Schemas ==========
|
| 262 |
+
|
| 263 |
+
class PaymentIntentCreate(BaseModel):
    """Schema for creating Stripe payment intent.

    Amounts use Stripe's integer minor-unit convention (cents).
    """

    amount: int = Field(..., gt=0, description="Payment amount in cents")
    # NOTE(review): default is "usd" while the checkout flows in
    # stripe_service use the configured TWD currency — confirm intended.
    currency: str = Field(default="usd", description="Currency code")
    metadata: Optional[Dict[str, str]] = Field(default=None, description="Payment metadata")
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
class PaymentIntentResponse(BaseModel):
    """Schema for Stripe payment intent response.

    Returned to the frontend so it can confirm the payment with
    Stripe.js using the client secret.
    """

    client_secret: str = Field(..., description="Stripe client secret")
    payment_intent_id: str = Field(..., description="Stripe payment intent ID")
|
setup_rag_db.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import os
|
| 3 |
+
from dotenv import load_dotenv
|
| 4 |
+
from sqlalchemy.ext.asyncio import create_async_engine
|
| 5 |
+
from sqlalchemy import text
|
| 6 |
+
|
| 7 |
+
load_dotenv()
|
| 8 |
+
|
| 9 |
+
async def setup_rag_db():
    """One-off setup of the RAG store.

    Enables the pgvector and uuid-ossp extensions, (re)creates the
    ``documents`` table with a 3072-dim embedding column, and installs the
    ``match_documents`` cosine-similarity function.

    Destructive: any existing ``documents`` table is dropped to guarantee
    the UUID/vector(3072) schema.
    """
    database_url = os.getenv("DATABASE_URL")
    if not database_url:
        print("DATABASE_URL not found")
        return

    # Ensure SQLAlchemy uses the asyncpg driver.
    if "postgresql://" in database_url and "asyncpg" not in database_url:
        database_url = database_url.replace("postgresql://", "postgresql+asyncpg://")

    engine = create_async_engine(
        database_url,
        echo=True,
        connect_args={
            # statement_cache_size=0 / jit off: presumably needed because the
            # DB sits behind a transaction pooler (e.g. Supabase/pgbouncer)
            # — TODO confirm.
            "statement_cache_size": 0,
            "server_settings": {
                "jit": "off",
            }
        }
    )

    try:
        async with engine.begin() as conn:
            print("Enabling pgvector extension...")
            await conn.execute(text("CREATE EXTENSION IF NOT EXISTS vector"))
            await conn.execute(text("CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\""))

            print("Checking/Creating documents table...")
            # Drop table if exists to ensure correct schema (UUID vs BigInt conflict)
            await conn.execute(text("DROP TABLE IF EXISTS documents CASCADE"))

            await conn.execute(text("""
                CREATE TABLE IF NOT EXISTS documents (
                    id uuid PRIMARY KEY DEFAULT uuid_generate_v4(),
                    content text,
                    metadata jsonb,
                    embedding vector(3072) -- Ensure 3072 dimensions
                )
            """))

            print("Creating match_documents function...")
            await conn.execute(text("""
                CREATE OR REPLACE FUNCTION match_documents (
                  query_embedding vector(3072),
                  match_threshold float,
                  match_count int
                )
                RETURNS TABLE (
                  id uuid,
                  content text,
                  metadata jsonb,
                  similarity float
                )
                LANGUAGE plpgsql
                AS $$
                BEGIN
                  RETURN QUERY
                  SELECT
                    documents.id,
                    documents.content,
                    documents.metadata,
                    1 - (documents.embedding <=> query_embedding) AS similarity
                  FROM documents
                  WHERE 1 - (documents.embedding <=> query_embedding) > match_threshold
                  ORDER BY similarity DESC
                  LIMIT match_count;
                END;
                $$;
            """))

        print("Done setup.")
    finally:
        # Fix: dispose the engine so asyncpg connections are closed cleanly
        # when the script finishes (previously they were leaked).
        await engine.dispose()
|
| 79 |
+
|
| 80 |
+
if __name__ == "__main__":
    # Run the one-off schema setup when executed directly as a script.
    asyncio.run(setup_rag_db())
|
stripe_service.py
ADDED
|
@@ -0,0 +1,493 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Stripe payment service for Silver Table Assistant.
|
| 3 |
+
Handles payment processing, checkout sessions, and webhooks.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import stripe
|
| 8 |
+
from typing import Optional, Dict, Any
|
| 9 |
+
from uuid import UUID
|
| 10 |
+
|
| 11 |
+
from config import settings
|
| 12 |
+
from exceptions import PaymentException, handle_payment_error
|
| 13 |
+
|
| 14 |
+
# Initialize Stripe API key
|
| 15 |
+
stripe.api_key = settings.stripe_secret_key
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def create_checkout_session_for_order(order) -> str:
    """
    Create a Stripe checkout session for an order.

    Builds one Stripe line item per order item — resolving name/price from
    ``menu_data`` when the item only carries a ``menu_item_id`` — then opens
    a hosted checkout session with Taiwan-only shipping.

    Args:
        order: Order object with items and total_amount

    Returns:
        Stripe checkout session URL

    Raises:
        Exception: If the session cannot be created (original cause chained).
    """
    import logging
    logger = logging.getLogger(__name__)

    try:
        # Debug: log the order structure so malformed payloads are traceable.
        logger.info(f"[DEBUG] Order ID: {getattr(order, 'id', 'unknown')}")
        logger.info(f"[DEBUG] Order items type: {type(order.items)}")
        logger.info(f"[DEBUG] Order items: {order.items}")
        logger.info(f"[DEBUG] Order total_amount: {order.total_amount}")

        line_items = []

        if isinstance(order.items, list):
            for item in order.items:
                logger.info(f"[DEBUG] Processing item: {item}")
                if 'menu_item_id' in item:
                    # Frontend format: only an ID — resolve details from menu_data.
                    try:
                        from menu_data import get_menu_item_by_id
                        menu_item = get_menu_item_by_id(item['menu_item_id'])
                        menu_item_name = menu_item.get('name', 'Menu Item')
                        # Price in menu_data is already in cents.
                        menu_item_price = menu_item.get('price', order.total_amount)
                        logger.info(f"[DEBUG] Fetched menu item: {menu_item_name}, price (cents): {menu_item_price}")
                    except (ValueError, ImportError) as e:
                        # Best-effort fallback: keep checkout going with a
                        # placeholder name and the order total.
                        logger.error(f"[DEBUG] Failed to fetch menu item {item.get('menu_item_id')}: {e}")
                        menu_item_name = f"Menu Item {item.get('menu_item_id', 'Unknown')}"
                        menu_item_price = order.total_amount
                else:
                    # Expected format: item already carries name and price.
                    menu_item_name = item.get('name', 'Menu Item')
                    menu_item_price = item.get('price', order.total_amount)

                quantity = item.get('quantity', 1)

                line_items.append({
                    'price_data': {
                        'currency': settings.stripe_default_currency,  # Taiwan Dollar
                        'product_data': {
                            'name': menu_item_name,
                        },
                        'unit_amount': int(menu_item_price),  # Amount in cents
                    },
                    'quantity': quantity,
                })
        else:
            # Single item order - fallback
            line_items.append({
                'price_data': {
                    'currency': settings.stripe_default_currency,
                    'product_data': {
                        'name': 'Silver Table Order',
                    },
                    'unit_amount': int(order.total_amount),
                },
                'quantity': 1,
            })

        # Create Stripe checkout session.
        session = stripe.checkout.Session.create(
            payment_method_types=['card'],
            line_items=line_items,
            mode='payment',
            success_url=f"{settings.frontend_url}/order/success?session_id={{CHECKOUT_SESSION_ID}}",
            cancel_url=f"{settings.frontend_url}/order/cancel",
            metadata={
                'order_id': str(order.id),
                'profile_id': str(order.profile_id),
                'order_type': 'food_order'
            },
            shipping_address_collection={
                'allowed_countries': ['TW'],  # Taiwan
            },
            phone_number_collection={
                'enabled': True,
            }
        )

        return session.url

    except Exception as e:
        # Fix: chain the original exception so the real cause and traceback
        # are preserved instead of being flattened into a string.
        raise Exception(f"Failed to create checkout session: {str(e)}") from e
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def create_checkout_session_for_donation(donation) -> str:
    """
    Create a Stripe checkout session for a donation.

    Args:
        donation: Donation object with amount and donor_name

    Returns:
        Stripe checkout session URL

    Raises:
        PaymentException: If the amount is below the configured minimum, or
            Stripe rejects it as too small.
    """
    try:
        # Validate minimum donation amount for Stripe (donation.amount is in cents)
        min_amount_cents = int(settings.MIN_DONATION_AMOUNT * 100)
        if donation.amount < min_amount_cents:
            raise PaymentException(f"捐款金額必須至少為 NT${settings.MIN_DONATION_AMOUNT} 元")

        # Create Stripe checkout session for donation
        session = stripe.checkout.Session.create(
            payment_method_types=['card'],
            line_items=[
                {
                    'price_data': {
                        'currency': settings.stripe_default_currency,  # Taiwan Dollar
                        'product_data': {
                            'name': '銀桌助手捐款',
                            'description': f'捐款人: {donation.donor_name or "匿名"}' if donation.donor_name else '匿名捐款',
                        },
                        'unit_amount': int(donation.amount),  # Amount in cents
                    },
                    'quantity': 1,
                }
            ],
            mode='payment',
            success_url=f"{settings.frontend_url}/donation/success?session_id={{CHECKOUT_SESSION_ID}}",
            cancel_url=f"{settings.frontend_url}/donation/cancel",
            metadata={
                'donation_id': str(donation.id),
                'donor_name': donation.donor_name or 'anonymous',
                'order_type': 'donation'
            },
            phone_number_collection={
                'enabled': True,
            }
        )

        return session.url

    except PaymentException:
        # Fix: propagate our own validation error unchanged. Previously it
        # fell through to the generic handler below and was re-wrapped by
        # handle_payment_error, obscuring the user-facing message.
        raise
    except stripe.error.InvalidRequestError as e:
        # Stripe can return an amount_too_small error code
        code = getattr(e, 'code', None)
        if code == 'amount_too_small' or 'amount_too_small' in str(e):
            raise PaymentException(f"金額太低,最低需 NT${settings.MIN_DONATION_AMOUNT}")
        raise handle_payment_error(e, 'create_checkout_session_for_donation')
    except Exception as e:
        raise handle_payment_error(e, 'create_checkout_session_for_donation')
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
def handle_webhook(payload: bytes, sig_header: str) -> Dict[str, Any]:
    """
    Handle Stripe webhook events.

    Verifies the signature, then dispatches to the event-specific handler.
    Unrecognized event types are acknowledged as 'ignored'.

    Args:
        payload: Raw webhook payload
        sig_header: Stripe signature header

    Returns:
        Dictionary with event details and status

    Raises:
        PaymentException: If the signature is invalid or a handler raised one.
        Exception: For any other handling failure (original cause chained).
    """
    try:
        # Verify webhook signature before trusting any of the payload.
        event = stripe.Webhook.construct_event(
            payload, sig_header, settings.stripe_webhook_secret
        )

        # Dispatch on event type.
        if event['type'] == 'checkout.session.completed':
            session = event['data']['object']
            return handle_checkout_session_completed(session)
        elif event['type'] == 'payment_intent.succeeded':
            payment_intent = event['data']['object']
            return handle_payment_intent_succeeded(payment_intent)
        elif event['type'] == 'payment_intent.payment_failed':
            payment_intent = event['data']['object']
            return handle_payment_intent_failed(payment_intent)
        else:
            return {
                'status': 'ignored',
                'message': f'Event type {event["type"]} not handled',
                'event_id': event['id']
            }

    except stripe.error.SignatureVerificationError as e:
        # Invalid signature - treat as payment related/security error
        raise PaymentException(f"Invalid webhook signature: {str(e)}")
    except PaymentException:
        # Fix: let domain errors raised by the handlers propagate unchanged
        # instead of being re-wrapped as a generic Exception below.
        raise
    except Exception as e:
        # Fix: chain the cause so the original traceback is preserved.
        raise Exception(f"Webhook handling failed: {str(e)}") from e
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def handle_checkout_session_completed(session: Dict[str, Any]) -> Dict[str, Any]:
    """
    Handle successful checkout session completion.

    Dispatches on the ``order_type`` recorded in the session metadata and
    forwards to the matching status-update routine.

    Args:
        session: Stripe checkout session object

    Returns:
        Dictionary with processing result
    """
    session_id = session['id']
    meta = session.get('metadata', {})
    order_type = meta.get('order_type')

    def _metadata_error(field: str) -> Dict[str, Any]:
        # Common shape for "required metadata key is absent" responses.
        return {
            'status': 'error',
            'message': f'Missing {field} in metadata',
            'session_id': session_id
        }

    if order_type == 'food_order':
        order_id = meta.get('order_id')
        if not order_id:
            return _metadata_error('order_id')
        # Paid food order: flip it to confirmed.
        return update_order_payment_status(UUID(order_id), 'confirmed', session_id)

    if order_type == 'donation':
        donation_id = meta.get('donation_id')
        if not donation_id:
            return _metadata_error('donation_id')
        # Paid donation: flip it to completed.
        return update_donation_payment_status(UUID(donation_id), 'completed', session_id)

    return {
        'status': 'error',
        'message': f'Unknown order_type: {order_type}',
        'session_id': session_id
    }
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def handle_payment_intent_succeeded(payment_intent: Dict[str, Any]) -> Dict[str, Any]:
    """
    Acknowledge a successful payment intent.

    Backup handler only: the authoritative processing is done by the
    checkout.session.completed handler.

    Args:
        payment_intent: Stripe payment intent object

    Returns:
        Dictionary with processing result
    """
    result = {
        'status': 'processed',
        'message': 'Payment intent succeeded - handled via checkout session',
        'payment_intent_id': payment_intent['id'],
    }
    return result
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
def handle_payment_intent_failed(payment_intent: Dict[str, Any]) -> Dict[str, Any]:
    """
    Report a failed payment intent.

    Surfaces Stripe's last payment error alongside the intent ID; no
    order/donation status is modified here.

    Args:
        payment_intent: Stripe payment intent object

    Returns:
        Dictionary with processing result
    """
    # Stripe omits last_payment_error for some failure modes — default to {}.
    error_details = payment_intent.get('last_payment_error', {})
    return {
        'status': 'failed',
        'message': 'Payment failed',
        'payment_intent_id': payment_intent['id'],
        'last_payment_error': error_details,
    }
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def update_order_payment_status(order_id: UUID, status: str, stripe_session_id: str) -> Dict[str, Any]:
    """
    Update order payment status in database.

    NOTE: no database write happens here yet — a DB session would need to
    be injected by the caller. This currently only reports the intended
    update so the webhook flow can be wired end-to-end.

    Args:
        order_id: Order ID
        status: New status
        stripe_session_id: Stripe session ID

    Returns:
        Dictionary with update result
    """
    summary = f'Order {order_id} status updated to {status}'
    return {
        'status': 'success',
        'message': summary,
        'order_id': str(order_id),
        'stripe_session_id': stripe_session_id,
        'new_status': status,
    }
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
def update_donation_payment_status(donation_id: UUID, status: str, stripe_session_id: str) -> Dict[str, Any]:
    """
    Update donation payment status in database.

    NOTE: no database write happens here yet — a DB session would need to
    be injected by the caller. This currently only reports the intended
    update so the webhook flow can be wired end-to-end.

    Args:
        donation_id: Donation ID
        status: New status
        stripe_session_id: Stripe session ID

    Returns:
        Dictionary with update result
    """
    summary = f'Donation {donation_id} status updated to {status}'
    return {
        'status': 'success',
        'message': summary,
        'donation_id': str(donation_id),
        'stripe_session_id': stripe_session_id,
        'new_status': status,
    }
|
| 347 |
+
|
| 348 |
+
|
| 349 |
+
def retrieve_payment_intent(payment_intent_id: str) -> Optional[Dict[str, Any]]:
    """
    Retrieve payment intent details from Stripe.

    Args:
        payment_intent_id: Stripe payment intent ID

    Returns:
        Payment intent object or None if not found
    """
    try:
        intent = stripe.PaymentIntent.retrieve(payment_intent_id)
    except stripe.error.InvalidRequestError:
        # Unknown or malformed ID — signal absence instead of raising.
        return None
    return intent
|
| 363 |
+
|
| 364 |
+
|
| 365 |
+
def create_refund(payment_intent_id: str, amount: Optional[int] = None) -> Dict[str, Any]:
    """
    Create a refund for a payment.

    Args:
        payment_intent_id: Payment intent ID to refund
        amount: Refund amount in cents (optional, defaults to full amount)

    Returns:
        On success: dict with 'refund_id', 'amount', and the Stripe refund
        'status'. On failure: dict with 'status': 'error' and a 'message'.
    """
    try:
        refund_data = {
            'payment_intent': payment_intent_id
        }

        # Fix: compare against None so an explicit amount of 0 is forwarded
        # to Stripe (which rejects it with a clear error) instead of being
        # silently treated as a full refund.
        if amount is not None:
            refund_data['amount'] = amount

        refund = stripe.Refund.create(**refund_data)

        # Fix: the original literal listed 'status' twice ('success', then
        # the Stripe refund status); the first entry was dead code. Report
        # the actual Stripe refund status only — runtime behavior unchanged.
        return {
            'refund_id': refund['id'],
            'amount': refund['amount'],
            'status': refund['status']
        }

    except Exception as e:
        return {
            'status': 'error',
            'message': f'Failed to create refund: {str(e)}'
        }
|
| 398 |
+
|
| 399 |
+
|
| 400 |
+
def get_payment_methods(customer_id: str) -> Dict[str, Any]:
    """
    Get saved payment methods for a customer.

    Args:
        customer_id: Stripe customer ID

    Returns:
        Dictionary with payment methods
    """
    try:
        listing = stripe.PaymentMethod.list(
            customer=customer_id,
            type='card'
        )
    except Exception as e:
        # Surface the failure as a result payload rather than raising.
        return {
            'status': 'error',
            'message': f'Failed to retrieve payment methods: {str(e)}'
        }

    return {
        'status': 'success',
        'payment_methods': listing['data']
    }
|
| 426 |
+
|
| 427 |
+
|
| 428 |
+
def construct_webhook_event(payload: bytes, sig_header: str, secret: str) -> Dict[str, Any]:
    """
    Construct a verified webhook event from payload and signature.

    Thin pass-through over ``stripe.Webhook.construct_event``.

    Args:
        payload: Raw payload bytes
        sig_header: Stripe signature header
        secret: Webhook secret

    Returns:
        Constructed event object
    """
    event = stripe.Webhook.construct_event(payload, sig_header, secret)
    return event
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
# Utility functions for webhook handling
|
| 444 |
+
|
| 445 |
+
def parse_metadata(metadata: Dict[str, str]) -> Dict[str, Any]:
    """
    Parse Stripe metadata and extract useful information.

    Values stored under keys ending in ``_id`` are converted back to
    ``UUID`` objects when they parse as one; everything else (including
    non-UUID ``_id`` values and empty strings) passes through unchanged.

    Args:
        metadata: Stripe metadata dictionary

    Returns:
        Parsed metadata with type conversion
    """
    def _convert(key: str, raw: str) -> Any:
        if not (key.endswith('_id') and raw):
            return raw
        try:
            return UUID(raw)
        except ValueError:
            # Not actually a UUID string — keep the raw value.
            return raw

    return {key: _convert(key, value) for key, value in metadata.items()}
|
| 468 |
+
|
| 469 |
+
|
| 470 |
+
def format_twd_amount(cents: int) -> str:
    """
    Format an amount given in cents as a Taiwan Dollar display string.

    Delegates to ``settings.format_currency`` so the display format is
    defined in exactly one place.

    Args:
        cents: Amount in cents

    Returns:
        Formatted amount string
    """
    return settings.format_currency(cents)
|
| 481 |
+
|
| 482 |
+
|
| 483 |
+
def validate_twd_currency(amount: int) -> bool:
    """
    Validate that an amount is within the configured order range.

    Args:
        amount: Amount in cents

    Returns:
        True if amount is valid
    """
    above_minimum = amount >= settings.min_order_amount
    below_maximum = amount <= settings.max_order_amount
    return above_minimum and below_maximum
|