|
|
from fastapi import FastAPI, UploadFile, File, Form, HTTPException |
|
|
from fastapi.middleware.cors import CORSMiddleware |
|
|
from fastapi.responses import JSONResponse |
|
|
import os, json, base64, asyncio |
|
|
import httpx |
|
|
from pathlib import Path |
|
|
from huggingface_hub import CommitScheduler |
|
|
from huggingface_hub import hf_hub_download |
|
|
from json import JSONDecodeError |
|
|
|
|
|
|
|
|
from urllib.parse import quote, unquote |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
app = FastAPI()

# Fully open CORS: any origin, method, and header. The read endpoints are
# public; write endpoints rely solely on the ADMIN_TOKEN form field for
# protection, so the wide-open policy is deliberate here.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Canonical tour names. get_fallback_tours() scans this list to resolve a
# base name to its numbered variants (e.g. "Maritimes" -> "Maritimes 1",
# "Maritimes 2"), so entries must exactly match the JSON filenames stored
# in the dataset ("<name>.json").
ALL_TOURS = [
    "Benelux",
    "Epic Rail 1", "Epic Rail 2", "Epic Rail 3",
    "Haida Gwaii 1", "Haida Gwaii 2", "Haida Gwaii 3",
    "Haida Gwaii 4", "Haida Gwaii 5", "Haida Gwaii 6", "Haida Gwaii 7",
    "Harrison",
    "Ireland",
    "Island Hopping", "Island Hopping 2",
    "Kootenays",
    "Maritimes 1", "Maritimes 2",
    "New Zealand",
    "Newfoundland 1", "Newfoundland 2", "Newfoundland 3", "Newfoundland 4",
    "Okanagan",
    "Portugal",
    "Quebec",
    "Quebec Holiday",
    "Scotland",
    "Sea to Sky",
    "Skeena",
    "Tofino 1", "Tofino 2",
    "Van Isle",
    "Yukon 1", "Yukon 2", "Yukon 3", "Yukon 4",
    "Yukon Winter",
]
|
|
|
|
|
# --- Configuration: all secrets come from the environment ---
ADMIN_TOKEN = os.environ.get("ADMIN_TOKEN")  # gates upload/delete endpoints
GOOGLE_KEY = os.environ.get("GOOGLE_KEY")    # Google Sheets API key for /sheets proxy
HF_TOKEN = os.environ.get("HF_TOKEN")        # Hugging Face token for dataset reads/writes

# Spreadsheet proxied by /sheets/{range}
SHEET_ID = "1o0AUq13j-7LZWDhCwFYgq07niZtvOya5iE5bbRQMGWc"

# HF dataset used as persistent image storage; DATASET_DIR is the local
# working copy that the CommitScheduler below pushes back to the Hub.
DATASET_REPO = "SalexAI/mztimgs"
DATASET_DIR = Path("dataset_cache")
DATASET_DIR.mkdir(parents=True, exist_ok=True)

# Warn but keep serving: read-only routes still work without these secrets
# (admin routes fail closed in require_admin, /sheets returns 503).
if not ADMIN_TOKEN:
    print("⚠️ WARNING: ADMIN_TOKEN not set")

if not GOOGLE_KEY:
    print("⚠️ WARNING: GOOGLE_KEY not set")
|
|
|
|
|
from urllib.parse import quote |
|
|
|
|
|
|
|
|
def normalize_tour(tour: str) -> str:
    """Decode percent-escapes in *tour* and strip surrounding whitespace."""
    decoded = unquote(tour)
    return decoded.strip()
|
|
|
|
|
|
|
|
async def fetch_from_hf(tour: str) -> dict | None:
    """
    Download ``data/<tour>.json`` from the HF dataset repo and parse it.

    Filenames are passed in decoded form — hf_hub_download handles URL
    encoding internally. Returns the parsed dict, or None on any failure
    (missing file, network error, bad JSON); callers treat None as
    "no cached images on the Hub".
    """
    filename = f"{tour}.json"
    print("🔍 HF HUB DOWNLOAD TRY:", filename)

    try:
        # hf_hub_download is blocking; run it off the event loop.
        path = await asyncio.to_thread(
            hf_hub_download,
            repo_id=DATASET_REPO,
            repo_type="dataset",
            # BUG FIX: a literal placeholder string was previously requested
            # here instead of the tour's file. Files live under "data/" in
            # the repo, matching the CommitScheduler's path_in_repo="data".
            filename=f"data/{filename}",
            token=HF_TOKEN,
        )

        print("⬇️ HF HUB DOWNLOADED:", path)

        with open(path, "r", encoding="utf-8") as f:
            return json.load(f)

    except Exception as e:
        # Deliberate best-effort: any failure just means "not available".
        print("❌ HF HUB DOWNLOAD FAILED:", str(e))
        return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Background committer: periodically pushes the contents of DATASET_DIR to
# the "data/" folder of the HF dataset repo. Writers hold scheduler.lock
# while modifying files so a commit never captures a half-written JSON.
scheduler = CommitScheduler(
    repo_id=DATASET_REPO,
    repo_type="dataset",
    folder_path=DATASET_DIR,
    path_in_repo="data",
    token=HF_TOKEN,
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import re |
|
|
|
|
|
def has_images(data: dict) -> bool:
    """Return True if *data* carries at least one non-empty image slot."""
    slots = data.get("images", {})
    return any(slots.get(name) for name in ("banner", "cover", "carousel"))
|
|
|
|
|
|
|
|
def get_fallback_tours(requested: str) -> list[str]:
    """
    Numbered variants of a base tour name, sorted by trailing number.

    'Maritimes' -> ['Maritimes 1', 'Maritimes 2']; 'Haida Gwaii' ->
    ['Haida Gwaii 1', ...]. A name that already ends in a number, or has
    no numbered variants in ALL_TOURS, yields [].
    """
    base = requested.strip()

    # Already a specific numbered tour — nothing to fall back to.
    if re.search(r"\s\d+$", base):
        return []

    prefix = base + " "
    candidates = [name for name in ALL_TOURS if name.startswith(prefix)]

    def _trailing_number(name: str) -> int:
        # Sort key: the numeric suffix, or 0 when absent.
        match = re.search(r"(\d+)$", name)
        return int(match.group(1)) if match else 0

    return sorted(candidates, key=_trailing_number)
|
|
|
|
|
def empty_structure():
    """Fresh blank image record: empty banner, empty cover, no carousel."""
    images = {slot: "" for slot in ("banner", "cover")}
    images["carousel"] = []
    return {"images": images}
|
|
|
|
|
def tour_path(tour: str) -> Path:
    """Local cache file holding *tour*'s image JSON."""
    filename = f"{tour}.json"
    return DATASET_DIR / filename
|
|
|
|
|
def load_json(path: Path) -> dict:
    """Read a tour JSON file, or return a blank record when absent."""
    if path.exists():
        with path.open("r", encoding="utf-8") as fh:
            return json.load(fh)
    return empty_structure()
|
|
|
|
|
def save_json(path: Path, data: dict):
    """Write *data* to *path* as pretty-printed (indent=2) JSON."""
    serialized = json.dumps(data, indent=2)
    with path.open("w", encoding="utf-8") as fh:
        fh.write(serialized)
|
|
|
|
|
def require_admin(token: str):
    """
    Abort with HTTP 403 unless *token* matches the configured ADMIN_TOKEN.

    Fails closed when ADMIN_TOKEN is unset. Uses hmac.compare_digest so
    the comparison runs in constant time — a plain `!=` on an attacker-
    supplied token leaks match length/position through timing.
    """
    import hmac  # local import keeps this fix self-contained

    if not ADMIN_TOKEN or not hmac.compare_digest(
        str(token).encode("utf-8"), ADMIN_TOKEN.encode("utf-8")
    ):
        raise HTTPException(status_code=403, detail="Invalid admin token")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/imageget/{tour}.json")
async def get_images(tour: str):
    """
    Serve the image record for a tour.

    Lookup order per candidate: local cache first, then the HF dataset
    (caching a Hub hit locally). If the exact tour yields no images, its
    numbered variants ('Maritimes' -> 'Maritimes 1', ...) get the same
    treatment. Falls back to an all-empty record.
    """
    tour = normalize_tour(tour)

    # The requested tour, then its numbered fallbacks — each candidate
    # goes through the identical cache-then-Hub sequence.
    for candidate in [tour] + get_fallback_tours(tour):
        local = tour_path(candidate)

        if local.exists():
            cached = load_json(local)
            if has_images(cached):
                return cached

        remote = await fetch_from_hf(candidate)
        if remote and has_images(remote):
            save_json(local, remote)  # warm the local cache for next time
            return remote

    return empty_structure()
|
|
|
|
|
@app.get("/")
async def root_status():
    """Status page: per-tour summary of every JSON file in the local cache."""
    summaries = []

    for json_file in DATASET_DIR.glob("*.json"):
        try:
            with json_file.open("r", encoding="utf-8") as fh:
                payload = json.load(fh)

            imgs = payload.get("images", {})
            has_banner = bool(imgs.get("banner"))
            has_cover = bool(imgs.get("cover"))
            n_carousel = len(imgs.get("carousel", []))

            summaries.append({
                "tour": json_file.stem,
                "banner": has_banner,
                "cover": has_cover,
                "carousel": n_carousel,
                "total_images": int(has_banner) + int(has_cover) + n_carousel,
            })
        except Exception as exc:
            # A corrupt cache entry is reported, not fatal.
            summaries.append({"tour": json_file.stem, "error": str(exc)})

    return {
        "status": "ok",
        "service": "Mile Zero Tours Image API",
        "cached_tours": len(summaries),
        "tours": sorted(summaries, key=lambda item: item.get("tour", "")),
    }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.post("/imageupload/{tour}")
async def upload_image(
    tour: str,
    admin_token: str = Form(...),
    slot: str = Form(...),
    file: UploadFile = File(None),
    base64_data: str = Form(None),
):
    """
    Admin endpoint: attach an image to a tour's 'banner', 'cover', or
    'carousel' slot, supplied either as a file upload or a base64 string.
    Carousel uploads append; banner/cover uploads overwrite.
    """
    require_admin(admin_token)

    if slot not in ("banner", "cover", "carousel"):
        raise HTTPException(status_code=400, detail="Invalid slot")

    if not file and not base64_data:
        raise HTTPException(status_code=400, detail="No image provided")

    path = tour_path(tour)

    # Hold the scheduler lock so the background commit never snapshots a
    # half-written file.
    with scheduler.lock:
        data = load_json(path)

        if file:
            b64 = base64.b64encode(await file.read()).decode("utf-8")
        else:
            b64 = base64_data.strip()

        if slot == "carousel":
            data["images"]["carousel"].append(b64)
        else:
            data["images"][slot] = b64

        save_json(path, data)

    return {
        "ok": True,
        "tour": tour,
        "slot": slot,
        "carousel_len": len(data["images"]["carousel"]),
    }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.post("/imagedelete/{tour}")
async def delete_image(
    tour: str,
    admin_token: str = Form(...),
    slot: str = Form(...),
    index: int = Form(None),
):
    """
    Admin endpoint: remove an image from a tour.

    slot='carousel' requires a valid 0-based *index* into the carousel
    list; slot='banner'/'cover' clears that single slot. Raises 400 on a
    bad slot or index, 403 on a bad admin token. Returns {"ok": True}.
    """
    require_admin(admin_token)

    path = tour_path(tour)

    # Lock so the background committer never snapshots a partial write.
    with scheduler.lock:
        data = load_json(path)

        if slot == "carousel":
            carousel = data["images"]["carousel"]
            # BUG FIX: the old check only rejected index >= len, so a
            # negative form value (e.g. -1) silently popped from the end
            # via Python's negative indexing. Reject anything outside
            # [0, len).
            if index is None or not 0 <= index < len(carousel):
                raise HTTPException(status_code=400, detail="Invalid index")
            carousel.pop(index)
        elif slot in ("banner", "cover"):
            data["images"][slot] = ""
        else:
            raise HTTPException(status_code=400, detail="Invalid slot")

        save_json(path, data)

    return {"ok": True}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@app.get("/sheets/{range:path}")
async def proxy_google_sheets(range: str):
    """
    Read-only proxy for the Google Sheets values API so GOOGLE_KEY stays
    server-side. *range* is an A1-style values range (may contain '/').
    Returns Google's JSON payload, or an error body with Google's status.
    """
    if not GOOGLE_KEY:
        raise HTTPException(status_code=503, detail="Sheets proxy not configured")

    base = f"https://sheets.googleapis.com/v4/spreadsheets/{SHEET_ID}"
    url = f"{base}/values/{range}?key={GOOGLE_KEY}"

    async with httpx.AsyncClient(timeout=15) as client:
        resp = await client.get(url)

    if resp.status_code != 200:
        return JSONResponse(
            status_code=resp.status_code,
            content={"error": "Google Sheets fetch failed"},
        )

    return resp.json()
|
|
|