Spaces:
Sleeping
Sleeping
Commit
·
c05ab2d
0
Parent(s):
Initial commit: FastAPI URL Blink application
Browse files- .gitattributes +11 -0
- .github/workflows/sync-to-hf.yml +61 -0
- .gitignore +48 -0
- Dockerfile +38 -0
- PRIVATE_KEY.pem +28 -0
- app.py +189 -0
- database.py +48 -0
- encryption.py +141 -0
- models.py +29 -0
- requirements.txt +7 -0
.gitattributes
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
*.jpg filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.jpeg filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.png filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.gif filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
*.mp3 filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
*.wav filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
*.ttf filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
*.db filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
sliding_puzzle filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
stockfish/stockfish-ubuntu-x86-64-avx2 filter=lfs diff=lfs merge=lfs -text
|
.github/workflows/sync-to-hf.yml
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Sync to Hugging Face hub
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
push:
|
| 5 |
+
branches: [main]
|
| 6 |
+
workflow_dispatch:
|
| 7 |
+
|
| 8 |
+
jobs:
|
| 9 |
+
sync-to-hub:
|
| 10 |
+
runs-on: ubuntu-latest
|
| 11 |
+
|
| 12 |
+
steps:
|
| 13 |
+
- uses: actions/checkout@v3
|
| 14 |
+
with:
|
| 15 |
+
fetch-depth: 0
|
| 16 |
+
lfs: true
|
| 17 |
+
|
| 18 |
+
- name: Configure Git
|
| 19 |
+
run: |
|
| 20 |
+
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
| 21 |
+
git config --global user.name "github-actions[bot]"
|
| 22 |
+
|
| 23 |
+
- name: Commit any uncommitted changes
|
| 24 |
+
run: |
|
| 25 |
+
git add -A
|
| 26 |
+
git diff --staged --quiet || git commit -m "Save working directory before LFS migration"
|
| 27 |
+
|
| 28 |
+
- name: Configure Git LFS
|
| 29 |
+
run: |
|
| 30 |
+
git lfs install
|
| 31 |
+
|
| 32 |
+
- name: Fetch all LFS objects
|
| 33 |
+
run: |
|
| 34 |
+
git lfs fetch --all
|
| 35 |
+
git lfs checkout
|
| 36 |
+
|
| 37 |
+
- name: Migrate existing files to LFS
|
| 38 |
+
run: |
|
| 39 |
+
git lfs migrate import --include="*.jpg,*.jpeg,*.png,*.gif,*.mp3,*.mp4,*.wav,*.ttf,*.db,sliding_puzzle,stockfish/stockfish-ubuntu-x86-64-avx2" --everything
|
| 40 |
+
|
| 41 |
+
- name: Track binary files with LFS
|
| 42 |
+
run: |
|
| 43 |
+
git lfs track "*.jpg" "*.jpeg" "*.png" "*.gif" "*.mp3" "*.mp4" "*.wav" "*.ttf" "*.db"
|
| 44 |
+
git lfs track "sliding_puzzle"
|
| 45 |
+
git lfs track "stockfish/stockfish-ubuntu-x86-64-avx2"
|
| 46 |
+
git add .gitattributes
|
| 47 |
+
git diff --staged --quiet || git commit -m "Configure Git LFS tracking"
|
| 48 |
+
|
| 49 |
+
- name: Fetch LFS objects after migration
|
| 50 |
+
run: |
|
| 51 |
+
git lfs fetch --all
|
| 52 |
+
git lfs pull
|
| 53 |
+
|
| 54 |
+
- name: Push to Hugging Face Space
|
| 55 |
+
env:
|
| 56 |
+
HF_TOKEN: ${{ secrets.HF_TOKEN }}
|
| 57 |
+
run: |
|
| 58 |
+
git remote remove space 2>/dev/null || true
|
| 59 |
+
git remote add space https://jebin2:${HF_TOKEN}@huggingface.co/spaces/jebin2/apigateway
|
| 60 |
+
git lfs push --all space main
|
| 61 |
+
git push --force space main
|
.gitignore
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Byte-compiled / optimized / DLL files
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.py[cod]
|
| 4 |
+
*$py.class
|
| 5 |
+
|
| 6 |
+
# Virtual environment
|
| 7 |
+
venv/
|
| 8 |
+
env/
|
| 9 |
+
.venv/
|
| 10 |
+
.env/
|
| 11 |
+
|
| 12 |
+
# SQLite database
|
| 13 |
+
*.db
|
| 14 |
+
*.sqlite
|
| 15 |
+
*.sqlite3
|
| 16 |
+
|
| 17 |
+
# Environment variables
|
| 18 |
+
.env
|
| 19 |
+
.env.local
|
| 20 |
+
.env.*.local
|
| 21 |
+
|
| 22 |
+
# IDE
|
| 23 |
+
.idea/
|
| 24 |
+
.vscode/
|
| 25 |
+
*.swp
|
| 26 |
+
*.swo
|
| 27 |
+
*~
|
| 28 |
+
|
| 29 |
+
# OS
|
| 30 |
+
.DS_Store
|
| 31 |
+
Thumbs.db
|
| 32 |
+
|
| 33 |
+
# Logs
|
| 34 |
+
*.log
|
| 35 |
+
logs/
|
| 36 |
+
|
| 37 |
+
# Testing
|
| 38 |
+
.pytest_cache/
|
| 39 |
+
.coverage
|
| 40 |
+
htmlcov/
|
| 41 |
+
|
| 42 |
+
# Distribution
|
| 43 |
+
dist/
|
| 44 |
+
build/
|
| 45 |
+
*.egg-info/
|
| 46 |
+
|
| 47 |
+
# Private keys — SECURITY NOTE: PRIVATE_KEY.pem is currently committed to this
# repository. It should be rotated, purged from git history, and ignored here.
# PRIVATE_KEY.pem
|
Dockerfile
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM python:3.11-slim
|
| 2 |
+
|
| 3 |
+
# Set working directory
|
| 4 |
+
WORKDIR /app
|
| 5 |
+
|
| 6 |
+
# Set environment variables
|
| 7 |
+
ENV PYTHONDONTWRITEBYTECODE=1 \
|
| 8 |
+
PYTHONUNBUFFERED=1 \
|
| 9 |
+
PYTHONPATH=/app
|
| 10 |
+
|
| 11 |
+
# Install system dependencies
|
| 12 |
+
RUN apt-get update && apt-get install -y --no-install-recommends \
|
| 13 |
+
gcc \
|
| 14 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 15 |
+
|
| 16 |
+
# Copy requirements first for better caching
|
| 17 |
+
COPY requirements.txt .
|
| 18 |
+
|
| 19 |
+
# Install Python dependencies
|
| 20 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
| 21 |
+
|
| 22 |
+
# Copy application code
|
| 23 |
+
COPY . .
|
| 24 |
+
|
| 25 |
+
# Create non-root user for security
|
| 26 |
+
RUN adduser --disabled-password --gecos '' appuser && \
|
| 27 |
+
chown -R appuser:appuser /app
|
| 28 |
+
USER appuser
|
| 29 |
+
|
| 30 |
+
# Expose port
|
| 31 |
+
EXPOSE 7860
|
| 32 |
+
|
| 33 |
+
# Health check
|
| 34 |
+
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
| 35 |
+
CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:7860/health')" || exit 1
|
| 36 |
+
|
| 37 |
+
# Run the application
|
| 38 |
+
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
|
PRIVATE_KEY.pem
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
-----BEGIN PRIVATE KEY-----
|
| 2 |
+
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCSaIwhbd+RdScq
|
| 3 |
+
OEC2GurgbR6yYOHS6fQMmg8CqKOL6sDSy4YyrSknAU4lYDnMs2T8hqX37Xo+SzAj
|
| 4 |
+
E5oilT/bKhfVaEVuVTdjB+4bDFx+AcMVFnUIYNagQdnv5DwId5bc3gm3Vg49dfH1
|
| 5 |
+
Z40VBCzrkJfsVT1cGuH77aDH4I5saL6IZKYrX53qE+c0gKWbHNe1b2oQFSjsk/qp
|
| 6 |
+
9YU+NjEI+NbxGCabPWW7IBE597c8psEMb92gdMkr5bGrwVAyRC2GZbARCwcHwwzM
|
| 7 |
+
38ItDAL1THckOPyVXHfs+9mzdtRPAeRDlBmGDJxoxr1p+CFwbru97LasNjGri6d0
|
| 8 |
+
vR03hdMtAgMBAAECggEAEqdC/KVbin7zIPluy+TXulqcwnGpLplD0Jvg6rVNX6+7
|
| 9 |
+
+9TAwdHMavL1ymRH/kZ4SNuSPL7NyC8F2zTi+qH/s0QvKi+b8lKQI8wTBZuIaJN/
|
| 10 |
+
5T6/EB9fvlD1ho9n01UClxxWVOdKxCuJStDJKagmXahGpZk3SqCwMcv/o/JTzbLv
|
| 11 |
+
R5KS7rFBXs+XCDHJ+LqJCezdAoadI07JQGoStygtQuLdwG6KhVpHdcLfh2RTnUSM
|
| 12 |
+
UaJZ2gdlMrau9Juk3Z6f/ceNRI8/fHS+ZWmbAy2YGDeH8rmip0ioKolalW9BlsWA
|
| 13 |
+
uVYWrSONhTHxuVT8Q8Leixu9nE+KUJUuDV0xhvbQHwKBgQDNQ9NjCRZCr8z/FaFT
|
| 14 |
+
fEG4rSiSFAFpAZzFcEAhc1qNgdYZ8SkEGjNcWTIk788xijRGPlBWiIM7x5et5N7p
|
| 15 |
+
+e+5qbPhz0SR3pLnubLrXLkchjqmPd1oAjGIwRUIEJF+qk1PMXvjR9DvFpJAICu+
|
| 16 |
+
Myf6+uWiUZ4raV9Smsv5P4u7pwKBgQC2mJAknVoxBemAvSv5YbvfBvN/lkzBcBiD
|
| 17 |
+
JmyTOwncNGsJ9KXLSWdhJ1wjrtKB5cQPuiNoEzrTgEzhWlehivT8giVntVmdopeB
|
| 18 |
+
x1l8iKzGTGpmpwSnnzPCtFhYuPhnZj1IMor/+WqZ79Smo6JIzGJ+0xg6BwhMz92B
|
| 19 |
+
43RCIKGFCwKBgQCbK8X2XR015n5a63gci5eHS2ebHoJ+ZhikbRod730p8lfvHo76
|
| 20 |
+
KfBfxJISuDLyaV7hJyRGdkZ2/4ibjaQyTp45xZ9VegGFIDP/9zLZmCvb82EM4UGp
|
| 21 |
+
6daWYhhLE3NZxNZSor7b7yN0SqTe0zMeQr/bjKXh0j92496KrLi7wJ0aiwKBgEYV
|
| 22 |
+
6wXSy+KTNVwvwCg5hkYFwgLP6ug4oX/9iKE+gPtft1Ib8GNF6oNU+z3LBYvMaGPs
|
| 23 |
+
+1ggQW0bCudYqNgdoQkm3zqeViZ2WRb8MHHneAGpJRH/u36nUdPDK0HqxZXSnWUP
|
| 24 |
+
2WCFGJC7iHDp0AmHQasSVXM4bcwl4QzRBDe5lKWfAoGAK381hlxKSM7YTf7NgSqo
|
| 25 |
+
ysAfdPEaKpxv4Pifcx1+i21KLLLic2cBw6tCKXDPcI6P6cYW5qIyH4G0cZlusISd
|
| 26 |
+
KvfxPwK/wZVejuQFfsSDLFRYDe9wRMBFEr/gSWO7S0sqmfhQ/QjyZsx9qO2dfJlA
|
| 27 |
+
upKwQSvGLRitnMom69uLllc=
|
| 28 |
+
-----END PRIVATE KEY-----
|
app.py
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
FastAPI URL Blink Application
|
| 3 |
+
|
| 4 |
+
Production-grade API for receiving encrypted user data,
|
| 5 |
+
decrypting it, and storing in SQLite database.
|
| 6 |
+
"""
|
| 7 |
+
import logging
|
| 8 |
+
from contextlib import asynccontextmanager
|
| 9 |
+
from typing import Optional
|
| 10 |
+
|
| 11 |
+
from fastapi import FastAPI, Query, Request, Depends, HTTPException, status
|
| 12 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 13 |
+
from fastapi.responses import JSONResponse
|
| 14 |
+
from sqlalchemy.ext.asyncio import AsyncSession
|
| 15 |
+
|
| 16 |
+
from database import get_db, init_db
|
| 17 |
+
from models import BlinkData
|
| 18 |
+
from encryption import decrypt_data, decrypt_multiple_blocks
|
| 19 |
+
|
| 20 |
+
# Configure logging
|
| 21 |
+
logging.basicConfig(
|
| 22 |
+
level=logging.INFO,
|
| 23 |
+
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
| 24 |
+
)
|
| 25 |
+
logger = logging.getLogger(__name__)
|
| 26 |
+
|
| 27 |
+
# User ID length constant
|
| 28 |
+
USER_ID_LENGTH = 20
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage the application's startup/shutdown lifecycle.

    Ensures the database schema exists before the app begins serving
    requests, and logs a message when the server shuts down.
    """
    logger.info("Starting up - initializing database...")
    await init_db()
    logger.info("Database initialized successfully")
    yield
    logger.info("Shutting down...")
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
# FastAPI application instance; the lifespan hook initializes the database.
app = FastAPI(
    title="URL Blink API",
    description="API for receiving and processing encrypted user data",
    version="1.0.0",
    lifespan=lifespan,
)

# CORS: wide open for now — tighten allow_origins before production use.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Configure appropriately for production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
@app.get("/health")
async def health_check():
    """Health check endpoint.

    Returns:
        A small JSON document reporting service liveness; used by the
        container HEALTHCHECK probe.
    """
    return {"status": "healthy", "service": "url-blink-api"}
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
@app.get("/blink")
async def blink(
    request: Request,
    userid: str = Query(..., description="User ID (20 chars) + encrypted data"),
    db: AsyncSession = Depends(get_db)
):
    """Process a blink request carrying encrypted user data.

    The ``userid`` query parameter packs two fields:
    - first 20 characters: the user ID
    - everything after: base64-encoded encrypted payload

    Args:
        request: FastAPI request object (used to read the referer/origin).
        userid: Combined user ID and encrypted data.
        db: Async database session (injected).

    Returns:
        JSON response with the user ID and number of records stored.

    Raises:
        HTTPException: 400 when ``userid`` is too short, 500 on
            unexpected processing/storage failures.
    """
    try:
        # Reject inputs that cannot even contain a full user ID.
        if len(userid) < USER_ID_LENGTH:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Parameter 'userid' must be at least {USER_ID_LENGTH} characters"
            )

        # Split the packed parameter into its two fields.
        user_id = userid[:USER_ID_LENGTH]
        encrypted_data = userid[USER_ID_LENGTH:]

        if not encrypted_data:
            # No payload — still record the hit with empty json_data.
            logger.warning(f"No encrypted data received for user: {user_id}")
            decrypted_results = []
        else:
            # Payload may be one block or several concatenated blocks.
            try:
                decrypted_results = decrypt_multiple_blocks(encrypted_data)
            except Exception as e:
                # Keep the request alive: persist the error context instead.
                logger.error(f"Decryption failed for user {user_id}: {e}")
                decrypted_results = [{"error": str(e), "raw_encrypted": encrypted_data[:100]}]

        # Referer header preferred; fall back to Origin.
        refer_url = request.headers.get("referer") or request.headers.get("origin")

        # One row per decrypted block.
        for json_data in decrypted_results:
            db.add(BlinkData(
                user_id=user_id,
                refer_url=refer_url,
                json_data=json_data,
            ))
        records_created = len(decrypted_results)

        # Decryption produced nothing but a payload existed: keep a trace row
        # so the event is not silently lost.
        if not decrypted_results and encrypted_data:
            db.add(BlinkData(
                user_id=user_id,
                refer_url=refer_url,
                json_data={"encrypted_length": len(encrypted_data)},
            ))
            records_created = 1

        await db.commit()

        logger.info(f"Successfully processed blink for user: {user_id}, records: {records_created}")

        return JSONResponse(
            status_code=status.HTTP_200_OK,
            content={
                "status": "success",
                "user_id": user_id,
                "records_created": records_created
            }
        )

    except HTTPException:
        # Re-raise deliberate HTTP errors untouched.
        raise
    except Exception as e:
        logger.error(f"Error processing blink request: {e}")
        await db.rollback()
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Internal server error processing request"
        )
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
@app.exception_handler(Exception)
async def global_exception_handler(request: Request, exc: Exception):
    """Catch-all handler: log unhandled errors and return a generic 500.

    Keeps internal error details out of the client-facing response.
    """
    logger.error(f"Unhandled exception: {exc}")
    return JSONResponse(
        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        content={"detail": "Internal server error"}
    )
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
if __name__ == "__main__":
    # Local development entry point; production uses the Docker CMD
    # (uvicorn on port 7860) instead.
    import uvicorn

    uvicorn.run(
        "app:app",
        host="0.0.0.0",
        port=8000,
        reload=True,
        log_level="info",
    )
|
database.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Database configuration for SQLite with SQLAlchemy.
|
| 3 |
+
"""
|
| 4 |
+
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
|
| 5 |
+
from sqlalchemy.orm import DeclarativeBase
|
| 6 |
+
import os
|
| 7 |
+
|
| 8 |
+
# Database URL - SQLite file in the same directory
|
| 9 |
+
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite+aiosqlite:///./blink_data.db")
|
| 10 |
+
|
| 11 |
+
# Create async engine
|
| 12 |
+
engine = create_async_engine(
|
| 13 |
+
DATABASE_URL,
|
| 14 |
+
echo=False, # Set to True for SQL debugging
|
| 15 |
+
future=True
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
# Session factory
|
| 19 |
+
async_session_maker = async_sessionmaker(
|
| 20 |
+
engine,
|
| 21 |
+
class_=AsyncSession,
|
| 22 |
+
expire_on_commit=False
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class Base(DeclarativeBase):
    """Declarative base shared by all SQLAlchemy models in this app."""
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
async def get_db() -> AsyncSession:
    """
    Dependency injection for database sessions.

    Yields a database session; the ``async with`` block guarantees the
    session is closed when the request finishes (the previous explicit
    ``finally: await session.close()`` duplicated what the context
    manager already does, so it has been removed).
    """
    async with async_session_maker() as session:
        yield session
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
async def init_db():
    """Create all tables registered on ``Base`` if they do not exist yet."""
    async with engine.begin() as conn:
        # create_all is synchronous; run it inside the async connection.
        await conn.run_sync(Base.metadata.create_all)
|
encryption.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
RSA Decryption utilities for the URL Blink application.
|
| 3 |
+
"""
|
| 4 |
+
import base64
|
| 5 |
+
import json
|
| 6 |
+
import os
|
| 7 |
+
import logging
|
| 8 |
+
from typing import Any, Optional
|
| 9 |
+
from cryptography.hazmat.primitives import serialization, hashes
|
| 10 |
+
from cryptography.hazmat.primitives.asymmetric import padding
|
| 11 |
+
from cryptography.hazmat.backends import default_backend
|
| 12 |
+
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
+
# Path to the private key file
|
| 16 |
+
PRIVATE_KEY_PATH = os.getenv("PRIVATE_KEY_PATH", "./PRIVATE_KEY.pem")
|
| 17 |
+
|
| 18 |
+
# Cache the private key after first load
|
| 19 |
+
_private_key = None
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def load_private_key():
    """
    Load the RSA private key from the PEM file, caching it after the
    first successful load.

    Returns:
        RSA private key object.

    Raises:
        FileNotFoundError: If the private key file doesn't exist.
        ValueError: If the private key cannot be parsed.
    """
    global _private_key

    # Fast path: key was already loaded earlier in the process lifetime.
    if _private_key is not None:
        return _private_key

    try:
        with open(PRIVATE_KEY_PATH, "rb") as key_file:
            pem_bytes = key_file.read()
        _private_key = serialization.load_pem_private_key(
            pem_bytes,
            password=None,
            backend=default_backend(),
        )
    except FileNotFoundError:
        logger.error(f"Private key file not found: {PRIVATE_KEY_PATH}")
        raise
    except Exception as e:
        logger.error(f"Failed to load private key: {e}")
        raise ValueError(f"Invalid private key: {e}")

    logger.info(f"Successfully loaded private key from {PRIVATE_KEY_PATH}")
    return _private_key
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def decrypt_data(encrypted_base64: str) -> Optional[Any]:
    """
    Decrypt base64-encoded RSA encrypted data.

    Args:
        encrypted_base64: Base64 URL-safe encoded encrypted string.

    Returns:
        Decrypted data parsed as JSON, or wrapped as ``{"raw_data": ...}``
        if the plaintext is not valid JSON.

    Raises:
        ValueError: If decryption fails.
    """
    try:
        private_key = load_private_key()

        # Re-pad the URL-safe base64 input. ``-len % 4`` adds exactly the
        # missing padding and, unlike ``4 - len % 4``, adds NOTHING when the
        # length is already a multiple of 4 (the old form appended four '='
        # characters in that case, which strict decoders reject).
        padded = encrypted_base64 + '=' * (-len(encrypted_base64) % 4)
        encrypted_bytes = base64.urlsafe_b64decode(padded)

        # Decrypt using RSA OAEP with SHA256 (must match the encrypting side).
        decrypted_bytes = private_key.decrypt(
            encrypted_bytes,
            padding.OAEP(
                mgf=padding.MGF1(algorithm=hashes.SHA256()),
                algorithm=hashes.SHA256(),
                label=None
            )
        )

        decrypted_str = decrypted_bytes.decode('utf-8')

        # Prefer structured JSON; fall back to a raw-string wrapper.
        try:
            return json.loads(decrypted_str)
        except json.JSONDecodeError:
            logger.warning("Decrypted data is not valid JSON, returning raw string")
            return {"raw_data": decrypted_str}

    except Exception as e:
        logger.error(f"Decryption failed: {e}")
        raise ValueError(f"Failed to decrypt data: {e}")
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def decrypt_multiple_blocks(encrypted_data: str, block_size: int = 344) -> list[Any]:
    """
    Decrypt multiple concatenated encrypted blocks.

    RSA 2048-bit ciphertext is 256 bytes, i.e. ~344 chars in base64, so the
    input is sliced into fixed-size chunks and each chunk is decrypted
    independently. Chunks that fail to decrypt are logged and skipped.

    Args:
        encrypted_data: Concatenated base64-encoded encrypted blocks.
        block_size: Base64 length of one encrypted block (344 for RSA-2048).

    Returns:
        List of decrypted data objects (possibly empty).
    """
    decoded: list[Any] = []

    # Short input: a single (possibly truncated) block.
    if len(encrypted_data) <= block_size:
        try:
            single = decrypt_data(encrypted_data)
        except Exception as e:
            logger.error(f"Failed to decrypt single block: {e}")
        else:
            if single:
                decoded.append(single)
        return decoded

    # Otherwise walk the input one block at a time.
    for offset in range(0, len(encrypted_data), block_size):
        chunk = encrypted_data[offset:offset + block_size]
        if not chunk:  # Skip empty blocks
            continue
        try:
            item = decrypt_data(chunk)
        except Exception as e:
            logger.error(f"Failed to decrypt block {offset // block_size}: {e}")
            continue
        if item:
            decoded.append(item)

    return decoded
|
models.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
SQLAlchemy models for the URL Blink application.
|
| 3 |
+
"""
|
| 4 |
+
from sqlalchemy import Column, Integer, String, Text, DateTime, JSON
|
| 5 |
+
from sqlalchemy.sql import func
|
| 6 |
+
from database import Base
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class BlinkData(Base):
    """
    Model for storing decrypted blink data.

    Attributes:
        id: Primary key.
        user_id: User identifier (first 20 chars from the URL param).
        refer_url: Referer URL taken from the request header.
        json_data: Decrypted JSON payload.
        created_at: Server-side timestamp of record creation.
    """

    __tablename__ = "blink_data"

    # Auto-incrementing surrogate key.
    id = Column(Integer, primary_key=True, autoincrement=True, index=True)
    # Indexed because queries filter by user.
    user_id = Column(String(20), index=True, nullable=False)
    refer_url = Column(Text, nullable=True)
    json_data = Column(JSON, nullable=True)
    created_at = Column(DateTime(timezone=True), server_default=func.now())

    def __repr__(self):
        return f"<BlinkData(id={self.id}, user_id={self.user_id})>"
|
requirements.txt
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# FastAPI URL Blink Application Dependencies
|
| 2 |
+
fastapi>=0.104.0
|
| 3 |
+
uvicorn[standard]>=0.24.0
|
| 4 |
+
sqlalchemy>=2.0.0
|
| 5 |
+
aiosqlite>=0.19.0
|
| 6 |
+
cryptography>=41.0.0
|
| 7 |
+
pydantic>=2.0.0
|