richtext committed on
Commit
3674b4b
·
verified ·
1 Parent(s): 337fd75

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .dockerignore +50 -0
  2. .gitattributes +12 -0
  3. .gitignore +47 -0
  4. Dockerfile +71 -0
  5. README.md +61 -11
  6. backend/__init__.py +0 -0
  7. backend/api/__init__.py +12 -0
  8. backend/api/admin.py +345 -0
  9. backend/api/auth.py +114 -0
  10. backend/api/box.py +437 -0
  11. backend/api/groups.py +337 -0
  12. backend/api/pipeline.py +267 -0
  13. backend/api/sensors.py +97 -0
  14. backend/api/sites.py +129 -0
  15. backend/api/users.py +227 -0
  16. backend/config.py +106 -0
  17. backend/core/__init__.py +0 -0
  18. backend/core/access_control.py +165 -0
  19. backend/core/dependencies.py +75 -0
  20. backend/core/security.py +57 -0
  21. backend/database.py +144 -0
  22. backend/main.py +441 -0
  23. backend/models/__init__.py +12 -0
  24. backend/models/box_connection.py +69 -0
  25. backend/models/group.py +19 -0
  26. backend/models/pipeline.py +43 -0
  27. backend/models/sensor_data.py +23 -0
  28. backend/models/site.py +78 -0
  29. backend/models/user.py +36 -0
  30. backend/requirements.txt +15 -0
  31. backend/schemas/__init__.py +11 -0
  32. backend/schemas/admin.py +45 -0
  33. backend/schemas/auth.py +25 -0
  34. backend/schemas/group.py +60 -0
  35. backend/schemas/pipeline.py +56 -0
  36. backend/schemas/sensor.py +22 -0
  37. backend/schemas/site.py +49 -0
  38. backend/schemas/user.py +41 -0
  39. backend/services/__init__.py +0 -0
  40. backend/services/auth.py +84 -0
  41. backend/services/box_integration.py +312 -0
  42. backend/services/box_worker.py +404 -0
  43. backend/services/data_import.py +271 -0
  44. backend/services/pipeline_worker.py +383 -0
  45. backend/static/CSG_LOGO_light.png +3 -0
  46. backend/static/apple-touch-icon.png +0 -0
  47. backend/static/assets/SiteMapContent-BZBnHuK2.js +0 -0
  48. backend/static/assets/index-CNGvhoA_.css +1 -0
  49. backend/static/assets/index-ffXrP8Fx.js +0 -0
  50. backend/static/box-logo.png +0 -0
.dockerignore ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Git
2
+ .git
3
+ .gitignore
4
+
5
+ # Python
6
+ __pycache__
7
+ *.py[cod]
8
+ *$py.class
9
+ *.so
10
+ .Python
11
+ .venv
12
+ venv
13
+ ENV
14
+ .eggs
15
+ *.egg-info
16
+ .pytest_cache
17
+
18
+ # Node
19
+ node_modules
20
+ frontend/node_modules
21
+
22
+ # IDE
23
+ .vscode
24
+ .idea
25
+ *.swp
26
+ *.swo
27
+
28
+ # Local data
29
+ data/
30
+ *.db
31
+ uploads/
32
+ archives/
33
+
34
+ # Environment files (secrets are in HF Spaces settings)
35
+ .env
36
+ .env.local
37
+ .env.*.local
38
+
39
+ # Documentation and misc
40
+ LICENSE
41
+ docs/
42
+
43
+ # Build artifacts
44
+ dist/
45
+ build/
46
+ frontend/dist/
47
+
48
+ # OS files
49
+ .DS_Store
50
+ Thumbs.db
.gitattributes CHANGED
@@ -33,3 +33,15 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ backend/static/CSG_LOGO_light.png filter=lfs diff=lfs merge=lfs -text
37
+ backend/static/logo.png filter=lfs diff=lfs merge=lfs -text
38
+ backend/static/trex-logo.png filter=lfs diff=lfs merge=lfs -text
39
+ backend/static/videos/splash.mp4 filter=lfs diff=lfs merge=lfs -text
40
+ backend/static/videos/splash_2.mp4 filter=lfs diff=lfs merge=lfs -text
41
+ backend/static/videos/splash_3.mp4 filter=lfs diff=lfs merge=lfs -text
42
+ frontend/public/CSG_LOGO_light.png filter=lfs diff=lfs merge=lfs -text
43
+ frontend/public/logo.png filter=lfs diff=lfs merge=lfs -text
44
+ frontend/public/trex-logo.png filter=lfs diff=lfs merge=lfs -text
45
+ frontend/public/videos/splash.mp4 filter=lfs diff=lfs merge=lfs -text
46
+ frontend/public/videos/splash_2.mp4 filter=lfs diff=lfs merge=lfs -text
47
+ frontend/public/videos/splash_3.mp4 filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Dependencies
2
+ node_modules/
3
+ __pycache__/
4
+ *.py[cod]
5
+ *$py.class
6
+ .venv/
7
+ venv/
8
+ env/
9
+
10
+ # Build outputs
11
+ dist/
12
+ build/
13
+ *.egg-info/
14
+ .eggs/
15
+
16
+ # Data directories (contain sensitive data)
17
+ data/
18
+ uploads/
19
+ archives/
20
+
21
+ # Environment files
22
+ .env
23
+ .env.local
24
+ .env.*.local
25
+
26
+ # IDE
27
+ .vscode/
28
+ .idea/
29
+ *.swp
30
+ *.swo
31
+ *~
32
+
33
+ # OS
34
+ .DS_Store
35
+ Thumbs.db
36
+
37
+ # Logs
38
+ *.log
39
+ logs/
40
+
41
+ # Testing
42
+ .coverage
43
+ htmlcov/
44
+ .pytest_cache/
45
+
46
+ # Keep gitkeep files
47
+ !.gitkeep
Dockerfile ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # HuggingFace Spaces Docker deployment for Crop Dashboard
2
+ # Multi-stage build for smaller final image
3
+
4
+ # Stage 1: Build frontend
5
+ FROM node:20-alpine AS frontend-builder
6
+
7
+ WORKDIR /app/frontend
8
+
9
+ # Copy package files
10
+ COPY frontend/package*.json ./
11
+
12
+ # Install dependencies
13
+ RUN npm ci
14
+
15
+ # Copy frontend source
16
+ COPY frontend/ ./
17
+
18
+ # Build frontend
19
+ RUN npm run build
20
+
21
+ # Stage 2: Python backend + serve frontend
22
+ FROM python:3.11-slim
23
+
24
+ # Set environment variables
25
+ ENV PYTHONDONTWRITEBYTECODE=1
26
+ ENV PYTHONUNBUFFERED=1
27
+ ENV PORT=7860
28
+
29
+ # Install system dependencies for SQLCipher
30
+ RUN apt-get update && apt-get install -y --no-install-recommends \
31
+ gcc \
32
+ libsqlcipher-dev \
33
+ && rm -rf /var/lib/apt/lists/*
34
+
35
+ WORKDIR /app
36
+
37
+ # Copy requirements and install Python dependencies
38
+ COPY backend/requirements.txt .
39
+ RUN pip install --no-cache-dir -r requirements.txt
40
+
41
+ # Copy backend code
42
+ COPY backend/ ./backend/
43
+
44
+ # Copy built frontend from Stage 1 to backend/static (where main.py expects it)
45
+ COPY --from=frontend-builder /app/frontend/dist ./backend/static
46
+
47
+ # Create non-root user for security (HF Spaces requirement)
48
+ RUN useradd -m -u 1000 user
49
+
50
+ # Create data directory with proper permissions AFTER creating user
51
+ RUN mkdir -p /data /data/uploads /data/archives && \
52
+ chown -R user:user /data
53
+
54
+ # Switch to non-root user
55
+ USER user
56
+
57
+ # Set HOME for the user
58
+ ENV HOME=/home/user
59
+ ENV PATH="/home/user/.local/bin:$PATH"
60
+
61
+ # Set database URL to use persistent /data directory
62
+ ENV DATABASE_URL=sqlite:////data/crop_dashboard.db
63
+
64
+ # Set demo mode to disable Box integration
65
+ ENV DEMO_MODE=true
66
+
67
+ # Expose port 7860 (HuggingFace Spaces requirement)
68
+ EXPOSE 7860
69
+
70
+ # Run the application
71
+ CMD ["python", "-m", "uvicorn", "backend.main:app", "--host", "0.0.0.0", "--port", "7860"]
README.md CHANGED
@@ -1,11 +1,61 @@
1
- ---
2
- title: Csg Dash Demo
3
- emoji: 🌖
4
- colorFrom: blue
5
- colorTo: yellow
6
- sdk: docker
7
- pinned: false
8
- license: cc0-1.0
9
- ---
10
-
11
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Crop Dashboard Platform
3
+ emoji: 🌾
4
+ colorFrom: green
5
+ colorTo: blue
6
+ sdk: docker
7
+ pinned: false
8
+ license: mit
9
+ app_port: 7860
10
+ ---
11
+
12
+ # Crop Dashboard Platform
13
+
14
+ A professional agricultural sensor data management platform built with FastAPI and React.
15
+
16
+ ## Features
17
+
18
+ - Real-time sensor data visualization
19
+ - Multi-site management with crop categorization
20
+ - Role-based access control (RBAC)
21
+ - Group-based site access management
22
+ - Encrypted SQLite database (SQLCipher)
23
+ - JWT authentication with refresh tokens
24
+ - Dark/Light mode support
25
+
26
+ ## Demo Accounts
27
+
28
+ This demo uses synthetic data that is regenerated on each restart.
29
+
30
+ **Grower One:**
31
+ - Email: `grower1@demo.cropdash.dev`
32
+ - Password: `demo123`
33
+ - Access: Sites 01-05
34
+
35
+ **Grower Two:**
36
+ - Email: `grower2@demo.cropdash.dev`
37
+ - Password: `demo123`
38
+ - Access: Sites 06-10
39
+
40
+ **Admin Access:**
41
+ - Email: `admin@cropdash.dev`
42
+ - Password: Please inquire
43
+
44
+ > **Note:** All data displayed is synthetically generated for demonstration purposes only.
45
+
46
+ ## Environment Variables
47
+
48
+ Configure these as Secrets in your HuggingFace Space settings:
49
+
50
+ | Variable | Description | Required |
51
+ |----------|-------------|----------|
52
+ | `SECRET_KEY` | JWT access token signing key (min 32 chars) | Yes |
53
+ | `REFRESH_SECRET_KEY` | JWT refresh token signing key (min 32 chars) | Yes |
54
+ | `DB_ENCRYPTION_KEY` | SQLCipher database encryption key (64+ chars recommended) | Yes |
55
+ | `ADMIN_PASSWORD` | Admin account password | Yes |
56
+
57
+ ## Tech Stack
58
+
59
+ - **Backend:** FastAPI, SQLAlchemy, SQLCipher
60
+ - **Frontend:** React, TypeScript, Tailwind CSS, Plotly
61
+ - **Auth:** JWT tokens with refresh token support and token versioning
backend/__init__.py ADDED
File without changes
backend/api/__init__.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter
2
+ from backend.api import auth, users, sites, sensors, groups, admin, pipeline, box
3
+
4
+ api_router = APIRouter()
5
+ api_router.include_router(auth.router, prefix="/auth", tags=["Authentication"])
6
+ api_router.include_router(users.router, prefix="/users", tags=["Users"])
7
+ api_router.include_router(groups.router, prefix="/groups", tags=["Groups"])
8
+ api_router.include_router(sites.router, prefix="/sites", tags=["Sites"])
9
+ api_router.include_router(sensors.router, prefix="/sensors", tags=["Sensors"])
10
+ api_router.include_router(admin.router, prefix="/admin", tags=["Admin"])
11
+ api_router.include_router(pipeline.router, prefix="/pipeline", tags=["Pipeline"])
12
+ api_router.include_router(box.router, prefix="/box", tags=["Box Integration"])
backend/api/admin.py ADDED
@@ -0,0 +1,345 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from io import StringIO
3
+ import csv
4
+ from fastapi import APIRouter, HTTPException, status, Query
5
+ from fastapi.responses import StreamingResponse
6
+ from sqlalchemy import or_, cast, String
7
+ from backend.core.dependencies import DbSession, AdminUser
8
+ from backend.schemas.admin import (
9
+ AuditLogResponse, SystemStatsResponse, SiteCreate, SiteUpdate
10
+ )
11
+ from backend.schemas.site import SiteResponse
12
+ from backend.models import (
13
+ User, Group, Site, Crop, AuditLog, SensorData,
14
+ EquipmentGroup, Parameter
15
+ )
16
+
17
+ router = APIRouter()
18
+
19
+
20
@router.get("/stats", response_model=SystemStatsResponse)
def get_system_stats(db: DbSession, admin: AdminUser):
    """Return aggregate platform counts for the admin dashboard."""
    # Base queries reused for the total/active pairs; .filter() returns a
    # new query object, so the bases stay unmodified.
    users_q = db.query(User)
    sites_q = db.query(Site)
    return SystemStatsResponse(
        total_users=users_q.count(),
        active_users=users_q.filter(User.is_active == True).count(),
        total_groups=db.query(Group).count(),
        total_sites=sites_q.count(),
        active_sites=sites_q.filter(Site.is_active == True).count(),
        total_sensor_records=db.query(SensorData).count(),
        total_parameters=db.query(Parameter).count(),
        total_equipment_groups=db.query(EquipmentGroup).count(),
    )
32
+
33
+
34
@router.get("/audit", response_model=list[AuditLogResponse])
def get_audit_logs(
    db: DbSession,
    admin: AdminUser,
    limit: int = Query(default=100, le=1000),
    offset: int = Query(default=0),
    user_id: str | None = None,
    action: str | None = None,
    search: str | None = Query(default=None, description="Search in user email, IP address, resource type/id, and details"),
    start_date: datetime | None = Query(default=None, description="Filter logs from this date (ISO format)"),
    end_date: datetime | None = Query(default=None, description="Filter logs until this date (ISO format)")
):
    """Get audit logs with optional filtering and search.

    Results are ordered newest-first and paginated via offset/limit.
    ``search`` matches IP address, resource type/id, action, the JSON
    details column (cast to text), and user email (resolved to user ids).
    """
    query = db.query(AuditLog).order_by(AuditLog.created_at.desc())

    if user_id:
        query = query.filter(AuditLog.user_id == user_id)
    if action:
        query = query.filter(AuditLog.action == action)
    if start_date:
        query = query.filter(AuditLog.created_at >= start_date)
    if end_date:
        query = query.filter(AuditLog.created_at <= end_date)

    # user_map is always needed to resolve user_id -> email in the response.
    all_users = db.query(User).all()
    user_map = {u.id: u.email for u in all_users}

    if search:
        # AuditLog rows store only the user_id, so email search is resolved
        # to matching user ids in Python (case-insensitive substring).
        # Built here, not unconditionally, to avoid wasted work when there
        # is no search term.
        search_lower = search.lower()
        matching_user_ids = [u.id for u in all_users if search_lower in u.email.lower()]

        pattern = f"%{search}%"
        search_conditions = [
            AuditLog.ip_address.ilike(pattern),
            AuditLog.resource_type.ilike(pattern),
            AuditLog.resource_id.ilike(pattern),
            AuditLog.action.ilike(pattern),
            # details is JSON; cast to text so ilike can scan it.
            cast(AuditLog.details, String).ilike(pattern),
        ]
        if matching_user_ids:
            search_conditions.append(AuditLog.user_id.in_(matching_user_ids))

        query = query.filter(or_(*search_conditions))

    logs = query.offset(offset).limit(limit).all()

    return [
        AuditLogResponse(
            id=log.id,
            user_id=log.user_id,
            user_email=user_map.get(log.user_id) if log.user_id else None,
            action=log.action,
            resource_type=log.resource_type,
            resource_id=log.resource_id,
            details=log.details,
            ip_address=log.ip_address,
            created_at=log.created_at
        )
        for log in logs
    ]
100
+
101
+
102
@router.get("/audit/export")
def export_audit_logs(
    db: DbSession,
    admin: AdminUser,
    user_id: str | None = None,
    action: str | None = None,
    search: str | None = None,
    start_date: datetime | None = None,
    end_date: datetime | None = None
):
    """Export audit logs as CSV file.

    Applies the same filters as GET /audit but returns every matching row
    (no pagination) as a streamed CSV attachment.
    """
    query = db.query(AuditLog).order_by(AuditLog.created_at.desc())

    if user_id:
        query = query.filter(AuditLog.user_id == user_id)
    if action:
        query = query.filter(AuditLog.action == action)
    if start_date:
        query = query.filter(AuditLog.created_at >= start_date)
    if end_date:
        query = query.filter(AuditLog.created_at <= end_date)

    # Needed to resolve user_id -> email in the CSV rows.
    all_users = db.query(User).all()
    user_map = {u.id: u.email for u in all_users}

    if search:
        # Email search is resolved to user ids in Python because AuditLog
        # stores only the user_id; the map is built only when searching.
        search_lower = search.lower()
        matching_user_ids = [u.id for u in all_users if search_lower in u.email.lower()]
        pattern = f"%{search}%"
        search_conditions = [
            AuditLog.ip_address.ilike(pattern),
            AuditLog.resource_type.ilike(pattern),
            AuditLog.resource_id.ilike(pattern),
            AuditLog.action.ilike(pattern),
            cast(AuditLog.details, String).ilike(pattern),
        ]
        if matching_user_ids:
            search_conditions.append(AuditLog.user_id.in_(matching_user_ids))
        query = query.filter(or_(*search_conditions))

    logs = query.all()

    # Build the CSV in memory.
    output = StringIO()
    writer = csv.writer(output)
    writer.writerow([
        "ID", "Timestamp (UTC)", "User Email", "Action", "Resource Type",
        "Resource ID", "IP Address", "Details"
    ])
    for log in logs:
        writer.writerow([
            log.id,
            log.created_at.isoformat() if log.created_at else "",
            user_map.get(log.user_id, "") if log.user_id else "",
            log.action,
            log.resource_type or "",
            log.resource_id or "",
            log.ip_address or "",
            str(log.details) if log.details else ""
        ])
    output.seek(0)

    # Timestamped download name, e.g. audit_log_export_20240101_120000.csv
    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    filename = f"audit_log_export_{timestamp}.csv"

    # BUG FIX: the computed filename was never interpolated into the
    # Content-Disposition header (it was a literal placeholder). Quote the
    # value per RFC 6266.
    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f'attachment; filename="{filename}"'}
    )
180
+
181
+
182
@router.get("/sites", response_model=list[SiteResponse])
def list_all_sites(db: DbSession, admin: AdminUser):
    """List every site — active or not — for admin management."""
    responses = []
    for site in db.query(Site).all():
        # Crop may be unset; guard the relationship access.
        crop_name = site.crop.display_name if site.crop else None
        responses.append(SiteResponse(
            id=site.id,
            site_code=site.site_code,
            name=site.name,
            crop_id=site.crop_id,
            crop_name=crop_name,
            latitude=site.latitude,
            longitude=site.longitude,
            is_active=site.is_active,
        ))
    return responses
199
+
200
+
201
@router.post("/sites", response_model=SiteResponse, status_code=status.HTTP_201_CREATED)
def create_site(site_data: SiteCreate, db: DbSession, admin: AdminUser):
    """Create a new site after validating code uniqueness and crop reference."""
    # Reject duplicate site codes up front.
    duplicate = db.query(Site).filter(Site.site_code == site_data.site_code).first()
    if duplicate is not None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Site with this code already exists"
        )

    # The crop must exist; its display name is also used in the response.
    crop = db.query(Crop).filter(Crop.id == site_data.crop_id).first()
    if crop is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Invalid crop_id"
        )

    site = Site(
        site_code=site_data.site_code,
        name=site_data.name,
        crop_id=site_data.crop_id,
        latitude=site_data.latitude,
        longitude=site_data.longitude,
    )
    db.add(site)
    db.commit()
    db.refresh(site)

    # Record the creation in the audit trail.
    db.add(AuditLog(
        user_id=admin.id,
        action="site_created",
        resource_type="site",
        resource_id=site.id,
        details={"site_code": site.site_code, "name": site.name},
    ))
    db.commit()

    return SiteResponse(
        id=site.id,
        site_code=site.site_code,
        name=site.name,
        crop_id=site.crop_id,
        crop_name=crop.display_name,
        latitude=site.latitude,
        longitude=site.longitude,
        is_active=site.is_active,
    )
249
+
250
+
251
@router.put("/sites/{site_id}", response_model=SiteResponse)
def update_site(site_id: str, site_data: SiteUpdate, db: DbSession, admin: AdminUser):
    """Partially update a site; only fields present (non-None) in the payload change."""
    site = db.query(Site).filter(Site.id == site_id).first()
    if not site:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Site not found"
        )

    if site_data.site_code is not None:
        # Enforce site_code uniqueness, excluding the row being updated.
        existing = db.query(Site).filter(Site.site_code == site_data.site_code, Site.id != site_id).first()
        if existing:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Site with this code already exists"
            )
        site.site_code = site_data.site_code

    if site_data.name is not None:
        site.name = site_data.name
    if site_data.crop_id is not None:
        # Validate the crop reference before reassigning it.
        crop = db.query(Crop).filter(Crop.id == site_data.crop_id).first()
        if not crop:
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid crop_id")
        site.crop_id = site_data.crop_id
    if site_data.latitude is not None:
        site.latitude = site_data.latitude
    if site_data.longitude is not None:
        site.longitude = site_data.longitude
    if site_data.is_active is not None:
        site.is_active = site_data.is_active

    db.commit()
    db.refresh(site)

    # Log the action
    log = AuditLog(
        user_id=admin.id,
        action="site_updated",
        resource_type="site",
        resource_id=site.id,
        details={"site_code": site.site_code}
    )
    db.add(log)
    db.commit()

    return SiteResponse(
        id=site.id,
        site_code=site.site_code,
        name=site.name,
        crop_id=site.crop_id,
        crop_name=site.crop.display_name if site.crop else None,
        latitude=site.latitude,
        longitude=site.longitude,
        is_active=site.is_active
    )
307
+
308
+
309
@router.delete("/sites/{site_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_site(site_id: str, db: DbSession, admin: AdminUser):
    """Permanently remove a site, recording an audit entry first."""
    site = db.query(Site).filter(Site.id == site_id).first()
    if site is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Site not found"
        )

    # Capture identifying details before the row disappears; the log and
    # the delete commit together.
    db.add(AuditLog(
        user_id=admin.id,
        action="site_deleted",
        resource_type="site",
        resource_id=site.id,
        details={"site_code": site.site_code, "name": site.name},
    ))
    db.delete(site)
    db.commit()
330
+
331
+
332
@router.get("/crops", response_model=list[dict])
def list_crops_admin(db: DbSession, admin: AdminUser):
    """List all crops with site counts for admin."""
    result = []
    # NOTE: one count query per crop (N+1); acceptable for the small,
    # admin-only crop list.
    for crop in db.query(Crop).all():
        site_count = db.query(Site).filter(Site.crop_id == crop.id).count()
        result.append({
            "id": crop.id,
            "name": crop.name,
            "display_name": crop.display_name,
            "color": crop.color,
            "site_count": site_count,
        })
    return result
backend/api/auth.py ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, HTTPException, status, Request
2
+ from backend.core.dependencies import DbSession, CurrentUser
3
+ from backend.schemas.auth import LoginRequest, Token, RefreshRequest
4
+ from backend.schemas.user import UserResponse
5
+ from backend.services.auth import authenticate_user, create_token_for_user
6
+ from backend.core.security import verify_refresh_token
7
+ from backend.models import User, AuditLog
8
+
9
+ router = APIRouter()
10
+
11
+
12
@router.post("/login", response_model=Token)
def login(request: LoginRequest, db: DbSession, req: Request):
    """Authenticate a user and issue access/refresh tokens, auditing the attempt."""
    client_ip = req.client.host if req.client else None

    user = authenticate_user(db, request.email, request.password)
    if not user:
        # Persist the failed attempt before rejecting.
        db.add(AuditLog(
            action="login_failed",
            details={"email": request.email},
            ip_address=client_ip,
        ))
        db.commit()
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect email or password",
            headers={"WWW-Authenticate": "Bearer"},
        )

    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User account is disabled",
        )

    # Persist the successful login.
    db.add(AuditLog(
        user_id=user.id,
        action="login_success",
        details={"email": user.email},
        ip_address=client_ip,
    ))
    db.commit()

    return create_token_for_user(db, user)
46
+
47
+
48
@router.get("/me", response_model=UserResponse)
def get_current_user_info(current_user: CurrentUser, db: DbSession):
    """Return the authenticated user's profile, including group names."""
    return UserResponse(
        id=current_user.id,
        email=current_user.email,
        full_name=current_user.full_name,
        is_admin=current_user.is_admin,
        is_active=current_user.is_active,
        created_at=current_user.created_at,
        last_login=current_user.last_login,
        # Flatten the membership rows into plain group names.
        groups=[membership.group.name for membership in current_user.groups],
    )
62
+
63
+
64
@router.post("/logout")
def logout(current_user: CurrentUser, db: DbSession):
    """Logout user by invalidating all their tokens.

    Bumping token_version makes every previously issued access and refresh
    token fail its version check.
    """
    current_user.token_version = current_user.token_version + 1
    db.commit()
    return {"message": "Successfully logged out"}
70
+
71
+
72
@router.post("/refresh", response_model=Token)
def refresh_token(request: RefreshRequest, db: DbSession):
    """Exchange a valid refresh token for new access + refresh tokens.

    Validation order: token signature/expiry, payload shape, user
    existence, account status, then token version (revocation check).
    """
    payload = verify_refresh_token(request.refresh_token)

    if payload is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired refresh token",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # "sub" carries the user id in this service's JWT payloads.
    user_id = payload.get("sub")
    if not user_id:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid token payload",
        )

    user = db.query(User).filter(User.id == user_id).first()
    if not user:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found",
        )

    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User account is disabled",
        )

    # Check token version - ensures refresh tokens are invalidated on password change
    # (logout bumps user.token_version too, revoking outstanding tokens).
    token_version = payload.get("token_version")
    if token_version is None or token_version != user.token_version:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Token has been revoked",
            headers={"WWW-Authenticate": "Bearer"},
        )

    # Generate new tokens
    return create_token_for_user(db, user)
backend/api/box.py ADDED
@@ -0,0 +1,437 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Box integration API endpoints."""
2
+ from fastapi import APIRouter, HTTPException, status
3
+ from pydantic import BaseModel
4
+ from typing import Optional
5
+
6
+ from backend.config import settings
7
+ from backend.core.dependencies import DbSession, AdminUser
8
+ from backend.services.box_integration import BoxService, is_box_configured
9
+ from backend.services.box_worker import (
10
+ run_box_sync, get_sync_status, update_sync_schedule,
11
+ start_scheduler, stop_scheduler
12
+ )
13
+ from backend.models import AuditLog
14
+
15
+
16
+ router = APIRouter()
17
+
18
+
19
def check_demo_mode():
    """Abort with 403 when the app runs in demo mode (Box integration off)."""
    if not settings.demo_mode:
        return
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Box integration is disabled in demo mode. See the feature preview below for what's available in production."
    )
26
+
27
+
28
+ # Request/Response models
29
class OAuthCallbackRequest(BaseModel):
    """Payload for the OAuth redirect: authorization code plus CSRF state."""
    code: str
    state: str
32
+
33
+
34
class FolderConfigRequest(BaseModel):
    """Folder configuration for the sync pipeline: staging (incoming) and
    processed (done) Box folders, plus the polling interval."""
    staging_folder_id: str
    staging_folder_name: str
    processed_folder_id: str
    processed_folder_name: str
    sync_interval_minutes: int = 60
40
+
41
+
42
class ConnectionStatus(BaseModel):
    """Response model for GET /box/status: connection, sync, and backup state.

    Optional fields stay None until a connection row exists.
    """
    # is_configured: OAuth client credentials present; is_connected: a
    # stored connection row exists; is_active: that row is enabled.
    is_configured: bool
    is_connected: bool
    is_active: bool
    box_user_name: Optional[str] = None
    box_user_email: Optional[str] = None
    staging_folder_name: Optional[str] = None
    processed_folder_name: Optional[str] = None
    sync_interval_minutes: int = 60
    # ISO-formatted timestamps (serialized via .isoformat() by the endpoint).
    last_sync: Optional[str] = None
    last_sync_status: Optional[str] = None
    last_sync_message: Optional[str] = None
    files_processed_count: int = 0
    # Backup config
    backup_folder_id: Optional[str] = None
    backup_folder_name: Optional[str] = None
    backup_enabled: bool = False
    backup_schedule: Optional[str] = None
    backup_time: Optional[str] = None
    last_backup: Optional[str] = None
    last_backup_status: Optional[str] = None
    last_backup_message: Optional[str] = None
64
+
65
+
66
class BackupConfigRequest(BaseModel):
    """Payload for configuring database backups to a Box folder."""
    backup_folder_id: str
    backup_folder_name: str
    backup_enabled: bool = True
    backup_schedule: str = "manual"  # manual, daily, weekly
    backup_time: Optional[str] = None  # HH:MM format for scheduled backups
72
+
73
+
74
@router.get("/status")
def get_box_status(admin: AdminUser, db: DbSession) -> ConnectionStatus:
    """Get Box connection status including backup configuration.

    Not gated by check_demo_mode — presumably so the admin UI can still
    render the status panel in demo mode (TODO confirm).
    """
    box_service = BoxService(db)
    connection = box_service.get_connection()

    # No stored connection yet: report only whether OAuth is configured.
    if not connection:
        return ConnectionStatus(
            is_configured=is_box_configured(),
            is_connected=False,
            is_active=False
        )

    return ConnectionStatus(
        is_configured=is_box_configured(),
        is_connected=True,
        is_active=connection.is_active,
        box_user_name=connection.box_user_name,
        box_user_email=connection.box_user_email,
        staging_folder_name=connection.staging_folder_name,
        processed_folder_name=connection.processed_folder_name,
        sync_interval_minutes=connection.sync_interval_minutes,
        # Datetimes are serialized to ISO strings for the response model.
        last_sync=connection.last_sync.isoformat() if connection.last_sync else None,
        last_sync_status=connection.last_sync_status,
        last_sync_message=connection.last_sync_message,
        files_processed_count=connection.files_processed_count,
        # Backup fields
        backup_folder_id=connection.backup_folder_id,
        backup_folder_name=connection.backup_folder_name,
        backup_enabled=connection.backup_enabled,
        backup_schedule=connection.backup_schedule,
        backup_time=connection.backup_time,
        last_backup=connection.last_backup.isoformat() if connection.last_backup else None,
        last_backup_status=connection.last_backup_status,
        last_backup_message=connection.last_backup_message
    )
110
+
111
+
112
@router.get("/auth-url")
def get_auth_url(admin: AdminUser, db: DbSession):
    """Get Box OAuth authorization URL.

    Blocked in demo mode; requires BOX_CLIENT_ID/BOX_CLIENT_SECRET.
    """
    check_demo_mode()
    if not is_box_configured():
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Box OAuth not configured. Set BOX_CLIENT_ID and BOX_CLIENT_SECRET in .env"
        )

    box_service = BoxService(db)
    try:
        result = box_service.get_oauth_url()
        return result
    except ValueError as e:
        # Service-level validation failures surface as 400s.
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
128
+
129
+
130
@router.post("/callback")
def oauth_callback(request: OAuthCallbackRequest, admin: AdminUser, db: DbSession):
    """Handle Box OAuth callback - exchange code for tokens.

    NOTE(review): unlike get_auth_url, this endpoint does not call
    check_demo_mode(); confirm whether demo mode should also block the
    callback (the auth-url gate makes reaching it unlikely but not
    impossible).
    """
    box_service = BoxService(db)

    try:
        # Exchanges the one-time code (validated against state) for tokens
        # and persists the connection.
        connection = box_service.exchange_code(request.code, request.state)

        # Log audit event
        audit = AuditLog(
            user_id=admin.id,
            action='box_connect',
            resource_type='box_connection',
            resource_id=connection.id,
            details={
                'box_user_email': connection.box_user_email,
                'box_user_name': connection.box_user_name
            }
        )
        db.add(audit)
        db.commit()

        return {
            'success': True,
            'message': f'Connected to Box as {connection.box_user_name}',
            'user_name': connection.box_user_name,
            'user_email': connection.box_user_email
        }
    except ValueError as e:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
160
+
161
+
162
@router.get("/folders")
def list_folders(admin: AdminUser, db: DbSession, folder_id: str = '0'):
    """List folders in Box (for folder picker); folder_id '0' is the root."""
    try:
        entries = BoxService(db).list_folders(folder_id)
    except ValueError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc))
    return {'folders': entries, 'parent_id': folder_id}
172
+
173
+
174
@router.post("/configure")
def configure_folders(request: FolderConfigRequest, admin: AdminUser, db: DbSession):
    """Configure Box folders for staging and processed files."""
    service = BoxService(db)

    try:
        connection = service.update_folder_config(
            staging_folder_id=request.staging_folder_id,
            staging_folder_name=request.staging_folder_name,
            processed_folder_id=request.processed_folder_id,
            processed_folder_name=request.processed_folder_name,
            sync_interval_minutes=request.sync_interval_minutes
        )

        # Re-arm the background sync job with the new interval.
        update_sync_schedule(request.sync_interval_minutes)

        # Audit trail for the configuration change.
        db.add(AuditLog(
            user_id=admin.id,
            action='box_configure',
            resource_type='box_connection',
            resource_id=connection.id,
            details={
                'staging_folder': request.staging_folder_name,
                'processed_folder': request.processed_folder_name,
                'sync_interval': request.sync_interval_minutes
            }
        ))
        db.commit()

        return {
            'success': True,
            'message': 'Box folders configured successfully',
            'is_active': connection.is_active
        }
    except ValueError as exc:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc))
213
+
214
+
215
@router.post("/disconnect")
def disconnect_box(admin: AdminUser, db: DbSession):
    """Disconnect Box account."""
    service = BoxService(db)

    # Capture connection details for the audit entry before they are removed.
    connection = service.get_connection()
    if connection:
        db.add(AuditLog(
            user_id=admin.id,
            action='box_disconnect',
            resource_type='box_connection',
            resource_id=connection.id,
            details={'box_user_email': connection.box_user_email}
        ))

    if not service.disconnect():
        # Nothing was connected; the pending audit row (if any) is never committed.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="No Box connection found")

    # Halt the background sync before persisting the audit entry.
    stop_scheduler()
    db.commit()
    return {'success': True, 'message': 'Box disconnected'}
241
+
242
+
243
@router.post("/sync")
def trigger_sync(admin: AdminUser, db: DbSession):
    """Manually trigger Box sync."""
    worker_state = get_sync_status()

    if not worker_state.get('is_connected'):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="No Box connection available"
        )

    if not worker_state.get('is_active'):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Box connection not active. Please configure folders first."
        )

    # Record that this sync was kicked off manually by an admin.
    connection = BoxService(db).get_connection()
    if connection:
        db.add(AuditLog(
            user_id=admin.id,
            action='box_sync_manual',
            resource_type='box_connection',
            resource_id=connection.id
        ))
        db.commit()

    # force=True runs the sync immediately regardless of schedule.
    return run_box_sync(force=True)
277
+
278
+
279
@router.get("/sync-status")
def get_current_sync_status(admin: AdminUser, db: DbSession):
    """Get current sync worker status.

    Admin-only; simply proxies the worker's get_sync_status() snapshot.
    The db dependency is unused here but kept for signature consistency
    with the sibling endpoints.
    """
    return get_sync_status()
283
+
284
+
285
@router.get("/logs")
def get_sync_logs(admin: AdminUser, db: DbSession, limit: int = 20):
    """Get recent sync logs (most fields serialized as-is, timestamps as ISO strings)."""
    entries = []
    for log in BoxService(db).get_sync_logs(limit):
        entries.append({
            'id': log.id,
            'started_at': log.started_at.isoformat() if log.started_at else None,
            'completed_at': log.completed_at.isoformat() if log.completed_at else None,
            'status': log.status,
            'files_found': log.files_found,
            'files_processed': log.files_processed,
            'files_failed': log.files_failed,
            'records_imported': log.records_imported,
            'error_message': log.error_message
        })
    return {'logs': entries}
307
+
308
+
309
+ # ============== Database Backup to Box ==============
310
+
311
@router.post("/backup/configure")
def configure_backup(request: BackupConfigRequest, admin: AdminUser, db: DbSession):
    """Configure database backup to Box."""
    connection = BoxService(db).get_connection()
    if connection is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="No Box connection available. Please connect to Box first."
        )

    # Persist the new backup settings on the connection record.
    connection.backup_folder_id = request.backup_folder_id
    connection.backup_folder_name = request.backup_folder_name
    connection.backup_enabled = request.backup_enabled
    connection.backup_schedule = request.backup_schedule
    connection.backup_time = request.backup_time
    db.commit()

    # (Re)arm or disarm the scheduled backup job to match the new settings.
    from backend.services.box_worker import update_backup_schedule
    if request.backup_enabled and request.backup_schedule != "manual":
        update_backup_schedule(request.backup_schedule, request.backup_time)
    else:
        update_backup_schedule(None, None)

    # Audit trail for the configuration change.
    db.add(AuditLog(
        user_id=admin.id,
        action='backup_configured',
        resource_type='box_connection',
        resource_id=connection.id,
        details={
            'backup_folder': request.backup_folder_name,
            'schedule': request.backup_schedule,
            'enabled': request.backup_enabled
        }
    ))
    db.commit()

    return {
        'success': True,
        'message': 'Backup configuration saved',
        'backup_enabled': connection.backup_enabled,
        'backup_schedule': connection.backup_schedule
    }
361
+
362
+
363
@router.post("/backup/run")
def run_backup(admin: AdminUser, db: DbSession):
    """Manually trigger database backup to Box."""
    connection = BoxService(db).get_connection()
    if connection is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="No Box connection available"
        )

    if not connection.backup_folder_id:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Backup folder not configured. Please select a backup folder first."
        )

    # Deferred import, matching the other backup endpoints.
    from backend.services.box_worker import run_database_backup
    result = run_database_backup(triggered_by=admin.email)

    # Audit the manual run, including the worker's reported outcome.
    db.add(AuditLog(
        user_id=admin.id,
        action='backup_manual',
        resource_type='database',
        details={
            'status': result.get('status'),
            'backup_folder': connection.backup_folder_name,
            'filename': result.get('filename')
        }
    ))
    db.commit()

    return result
400
+
401
+
402
@router.delete("/backup/configure")
def disable_backup(admin: AdminUser, db: DbSession):
    """Disable database backup to Box."""
    connection = BoxService(db).get_connection()
    if connection is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="No Box connection found"
        )

    # Wipe all backup settings from the connection record.
    connection.backup_folder_id = None
    connection.backup_folder_name = None
    connection.backup_enabled = False
    connection.backup_schedule = None
    connection.backup_time = None
    db.commit()

    # Disarm the scheduled backup job.
    from backend.services.box_worker import update_backup_schedule
    update_backup_schedule(None, None)

    # Audit trail for the change.
    db.add(AuditLog(
        user_id=admin.id,
        action='backup_disabled',
        resource_type='box_connection',
        resource_id=connection.id
    ))
    db.commit()

    return {'success': True, 'message': 'Backup disabled'}
backend/api/groups.py ADDED
@@ -0,0 +1,337 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, HTTPException, status, Request
2
+ from backend.core.dependencies import DbSession, AdminUser
3
+ from backend.schemas.group import (
4
+ GroupCreate, GroupUpdate, GroupResponse, GroupDetailResponse,
5
+ GroupMemberResponse, GroupSiteResponse,
6
+ AssignUserToGroupRequest, AssignSiteToGroupRequest
7
+ )
8
+ from backend.models import Group, User, Site, UserGroup, GroupSite, AuditLog
9
+
10
+ router = APIRouter()
11
+
12
+
13
@router.get("", response_model=list[GroupResponse])
def list_groups(db: DbSession, admin: AdminUser):
    """List all groups with member and site counts (admin-only)."""
    responses = []
    for grp in db.query(Group).all():
        responses.append(GroupResponse(
            id=grp.id,
            name=grp.name,
            description=grp.description,
            created_at=grp.created_at,
            updated_at=grp.updated_at,
            member_count=len(grp.members),
            site_count=len(grp.sites)
        ))
    return responses
28
+
29
+
30
@router.post("", response_model=GroupResponse, status_code=status.HTTP_201_CREATED)
def create_group(group_data: GroupCreate, db: DbSession, admin: AdminUser, request: Request):
    """Create a new group; group names must be unique."""
    if db.query(Group).filter(Group.name == group_data.name).first() is not None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Group with this name already exists"
        )

    group = Group(name=group_data.name, description=group_data.description)
    db.add(group)
    db.commit()
    db.refresh(group)

    # Audit trail for the creation, including the caller's IP when known.
    db.add(AuditLog(
        user_id=admin.id,
        action="group_created",
        resource_type="group",
        resource_id=group.id,
        details={"name": group.name, "description": group.description},
        ip_address=request.client.host if request.client else None
    ))
    db.commit()

    # A brand-new group has no members or sites yet.
    return GroupResponse(
        id=group.id,
        name=group.name,
        description=group.description,
        created_at=group.created_at,
        updated_at=group.updated_at,
        member_count=0,
        site_count=0
    )
64
+
65
+
66
@router.get("/{group_id}", response_model=GroupDetailResponse)
def get_group(group_id: str, db: DbSession, admin: AdminUser):
    """Return one group with its full member and site rosters."""
    group = db.query(Group).filter(Group.id == group_id).first()
    if group is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Group not found"
        )

    # Flatten the UserGroup association rows into member summaries.
    members = []
    for membership in group.members:
        members.append(GroupMemberResponse(
            user_id=membership.user.id,
            email=membership.user.email,
            full_name=membership.user.full_name,
            role=membership.role
        ))

    # Flatten the GroupSite association rows into site summaries.
    sites = []
    for link in group.sites:
        sites.append(GroupSiteResponse(
            site_id=link.site.id,
            site_code=link.site.site_code,
            site_name=link.site.name
        ))

    return GroupDetailResponse(
        id=group.id,
        name=group.name,
        description=group.description,
        created_at=group.created_at,
        updated_at=group.updated_at,
        member_count=len(members),
        site_count=len(sites),
        members=members,
        sites=sites
    )
105
+
106
+
107
@router.put("/{group_id}", response_model=GroupResponse)
def update_group(group_id: str, group_data: GroupUpdate, db: DbSession, admin: AdminUser, request: Request):
    """Update a group's name and/or description, auditing any actual changes."""
    group = db.query(Group).filter(Group.id == group_id).first()
    if group is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Group not found"
        )

    changes = {}
    old_name = group.name

    if group_data.name is not None:
        # Reject a name already used by a *different* group.
        duplicate = db.query(Group).filter(
            Group.name == group_data.name, Group.id != group_id
        ).first()
        if duplicate:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Group with this name already exists"
            )
        if group_data.name != group.name:
            changes["name"] = {"old": group.name, "new": group_data.name}
        group.name = group_data.name

    if group_data.description is not None:
        if group_data.description != group.description:
            changes["description"] = {"old": group.description, "new": group_data.description}
        group.description = group_data.description

    db.commit()
    db.refresh(group)

    # Only write an audit entry when something actually changed.
    if changes:
        db.add(AuditLog(
            user_id=admin.id,
            action="group_updated",
            resource_type="group",
            resource_id=group.id,
            details={"group_name": old_name, "changes": changes},
            ip_address=request.client.host if request.client else None
        ))
        db.commit()

    return GroupResponse(
        id=group.id,
        name=group.name,
        description=group.description,
        created_at=group.created_at,
        updated_at=group.updated_at,
        member_count=len(group.members),
        site_count=len(group.sites)
    )
160
+
161
+
162
@router.delete("/{group_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_group(group_id: str, db: DbSession, admin: AdminUser, request: Request):
    """Delete a group, recording its size in the audit log first."""
    group = db.query(Group).filter(Group.id == group_id).first()
    if group is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Group not found"
        )

    # Capture details before the row (and its relationships) disappear.
    db.add(AuditLog(
        user_id=admin.id,
        action="group_deleted",
        resource_type="group",
        resource_id=group.id,
        details={"name": group.name, "member_count": len(group.members), "site_count": len(group.sites)},
        ip_address=request.client.host if request.client else None
    ))

    db.delete(group)
    db.commit()
184
+
185
+
186
@router.post("/{group_id}/users", status_code=status.HTTP_201_CREATED)
def assign_user_to_group(group_id: str, data: AssignUserToGroupRequest, db: DbSession, admin: AdminUser, request: Request):
    """Add a user to a group, or update their role if already a member."""
    group = db.query(Group).filter(Group.id == group_id).first()
    if group is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Group not found")

    user = db.query(User).filter(User.id == data.user_id).first()
    if user is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")

    membership = db.query(UserGroup).filter(
        UserGroup.user_id == data.user_id,
        UserGroup.group_id == group_id
    ).first()

    # Upsert semantics: existing membership gets a role change instead of a duplicate row.
    if membership:
        action = "user_role_updated"
        old_role = membership.role
        membership.role = data.role
    else:
        action = "user_assigned_to_group"
        old_role = None
        db.add(UserGroup(user_id=data.user_id, group_id=group_id, role=data.role))

    db.commit()

    details = {
        "user_email": user.email,
        "group_name": group.name,
        "role": data.role
    }
    if old_role:
        details["old_role"] = old_role

    db.add(AuditLog(
        user_id=admin.id,
        action=action,
        resource_type="user_group",
        resource_id=f"{user.id}:{group.id}",
        details=details,
        ip_address=request.client.host if request.client else None
    ))
    db.commit()

    return {"message": "User assigned to group"}
233
+
234
+
235
@router.delete("/{group_id}/users/{user_id}", status_code=status.HTTP_204_NO_CONTENT)
def remove_user_from_group(group_id: str, user_id: str, db: DbSession, admin: AdminUser, request: Request):
    """Remove a user from a group (404 if they are not a member)."""
    membership = db.query(UserGroup).filter(
        UserGroup.user_id == user_id,
        UserGroup.group_id == group_id
    ).first()
    if membership is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not in group")

    # Resolve display names for the audit entry; fall back to the raw ids.
    user = db.query(User).filter(User.id == user_id).first()
    group = db.query(Group).filter(Group.id == group_id).first()

    # Write the audit entry before the membership row is deleted.
    db.add(AuditLog(
        user_id=admin.id,
        action="user_removed_from_group",
        resource_type="user_group",
        resource_id=f"{user_id}:{group_id}",
        details={
            "user_email": user.email if user else user_id,
            "group_name": group.name if group else group_id,
            "role": membership.role
        },
        ip_address=request.client.host if request.client else None
    ))

    db.delete(membership)
    db.commit()
265
+
266
+
267
@router.post("/{group_id}/sites", status_code=status.HTTP_201_CREATED)
def assign_site_to_group(group_id: str, data: AssignSiteToGroupRequest, db: DbSession, admin: AdminUser, request: Request):
    """Grant a group access to a site; no-op if already assigned."""
    group = db.query(Group).filter(Group.id == group_id).first()
    if group is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Group not found")

    site = db.query(Site).filter(Site.id == data.site_id).first()
    if site is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Site not found")

    already_linked = db.query(GroupSite).filter(
        GroupSite.site_id == data.site_id,
        GroupSite.group_id == group_id
    ).first()
    if already_linked:
        return {"message": "Site already in group"}

    db.add(GroupSite(site_id=data.site_id, group_id=group_id))
    db.commit()

    # Audit trail for the new assignment.
    db.add(AuditLog(
        user_id=admin.id,
        action="site_assigned_to_group",
        resource_type="group_site",
        resource_id=f"{group.id}:{site.id}",
        details={
            "site_code": site.site_code,
            "site_name": site.name,
            "group_name": group.name
        },
        ip_address=request.client.host if request.client else None
    ))
    db.commit()

    return {"message": "Site assigned to group"}
306
+
307
+
308
@router.delete("/{group_id}/sites/{site_id}", status_code=status.HTTP_204_NO_CONTENT)
def remove_site_from_group(group_id: str, site_id: str, db: DbSession, admin: AdminUser, request: Request):
    """Revoke a group's access to a site (404 if not assigned)."""
    link = db.query(GroupSite).filter(
        GroupSite.site_id == site_id,
        GroupSite.group_id == group_id
    ).first()
    if link is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Site not in group")

    # Resolve display names for the audit entry; fall back to the raw ids.
    site = db.query(Site).filter(Site.id == site_id).first()
    group = db.query(Group).filter(Group.id == group_id).first()

    # Write the audit entry before the link row is deleted.
    db.add(AuditLog(
        user_id=admin.id,
        action="site_removed_from_group",
        resource_type="group_site",
        resource_id=f"{group_id}:{site_id}",
        details={
            "site_code": site.site_code if site else site_id,
            "site_name": site.name if site else None,
            "group_name": group.name if group else group_id
        },
        ip_address=request.client.host if request.client else None
    ))

    db.delete(link)
    db.commit()
backend/api/pipeline.py ADDED
@@ -0,0 +1,267 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Pipeline API endpoints for file upload and processing."""
2
+ from fastapi import APIRouter, HTTPException, UploadFile, File, status
3
+ from fastapi.responses import FileResponse, StreamingResponse
4
+ from typing import Optional
5
+ import shutil
6
+ import io
7
+ from datetime import datetime
8
+ from pathlib import Path
9
+
10
+ from backend.core.dependencies import DbSession, AdminUser
11
+ from backend.schemas.pipeline import (
12
+ FileInfo, FileUploadResponse, ProcessingResult,
13
+ PipelineStatus, ProcessRequest, DatabaseExportResponse
14
+ )
15
+ from backend.services import pipeline_worker
16
+ from backend.config import settings
17
+ from backend.models import User, Group, Site, SensorData, AuditLog, FileArchive
18
+
19
+
20
+ router = APIRouter()
21
+
22
+
23
@router.get("/status", response_model=PipelineStatus)
def get_pipeline_status(admin: AdminUser):
    """Get current pipeline status including staging and processed files."""
    info = pipeline_worker.get_pipeline_status()
    staging = [FileInfo(**entry) for entry in info['staging_files']]
    processed = [FileInfo(**entry) for entry in info['processed_files']]
    return PipelineStatus(
        staging_files=staging,
        processed_files=processed,
        is_processing=info['is_processing'],
        last_run=info['last_run']
    )
33
+
34
+
35
@router.post("/upload", response_model=FileUploadResponse)
async def upload_file(
    file: UploadFile = File(...),
    admin: AdminUser = None,
    db: DbSession = None
):
    """Upload a CSV file to the staging directory."""
    # Reject anything that is not a .csv upload.
    if not file.filename or not file.filename.lower().endswith('.csv'):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Only CSV files are allowed"
        )

    content = await file.read()

    # Enforce the 100MB upload ceiling.
    if len(content) > 100 * 1024 * 1024:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="File too large. Maximum size is 100MB"
        )

    # The worker decides the final on-disk name; use it in the response.
    saved_path = pipeline_worker.save_uploaded_file(content, file.filename)

    # Audit trail for the upload.
    db.add(AuditLog(
        user_id=admin.id,
        action='file_upload',
        resource_type='pipeline',
        details={'filename': saved_path.name, 'size': len(content)}
    ))
    db.commit()

    return FileUploadResponse(
        filename=saved_path.name,
        size=len(content),
        message="File uploaded to staging successfully"
    )
77
+
78
+
79
@router.post("/process", response_model=list[ProcessingResult])
def process_files(
    request: ProcessRequest,
    admin: AdminUser,
    db: DbSession
):
    """Process files from the staging directory (all, or only request.filenames)."""
    outcomes = pipeline_worker.process_all_staging_files(
        db,
        user_id=admin.id,
        filenames=request.filenames
    )
    return [ProcessingResult(**outcome) for outcome in outcomes]
93
+
94
+
95
@router.delete("/staging/{filename}")
def delete_staging_file(filename: str, admin: AdminUser, db: DbSession):
    """Delete a file from the staging directory.

    The route takes the file name as a path parameter and echoes it back in
    the success message; 404 when the worker reports no such staging file.
    """
    if pipeline_worker.delete_staging_file(filename):
        # Record the deletion in the audit trail.
        audit = AuditLog(
            user_id=admin.id,
            action='file_delete',
            resource_type='staging',
            details={'filename': filename}
        )
        db.add(audit)
        db.commit()
        return {"message": f"Deleted {filename} from staging"}

    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail="File not found in staging"
    )
114
+
115
+
116
@router.delete("/processed/{filename}")
def delete_processed_file(filename: str, admin: AdminUser, db: DbSession):
    """Delete a file from the processed directory.

    The route takes the file name as a path parameter and echoes it back in
    the success message; 404 when the worker reports no such processed file.
    """
    if pipeline_worker.delete_processed_file(filename):
        # Record the deletion in the audit trail.
        audit = AuditLog(
            user_id=admin.id,
            action='file_delete',
            resource_type='processed',
            details={'filename': filename}
        )
        db.add(audit)
        db.commit()
        return {"message": f"Deleted {filename} from processed"}

    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail="File not found in processed"
    )
135
+
136
+
137
@router.get("/database/export")
def export_database(admin: AdminUser, db: DbSession):
    """Export the entire database as a downloadable SQLite file."""
    db_path = Path(settings.database_url.replace("sqlite:///", ""))
    if not db_path.exists():
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Database file not found"
        )

    # Snapshot per-table row counts for the audit record.
    from sqlalchemy import inspect, text
    inspector = inspect(db.get_bind())
    table_counts = {}
    for table_name in sorted(inspector.get_table_names()):
        table_counts[table_name] = db.execute(
            text(f"SELECT COUNT(*) FROM {table_name}")
        ).scalar()

    # Audit the export, with the snapshot attached.
    db.add(AuditLog(
        user_id=admin.id,
        action='database_export',
        resource_type='database',
        details={'table_counts': table_counts}
    ))
    db.commit()

    # Serve the live database file under a timestamped download name.
    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    return FileResponse(
        path=str(db_path),
        filename=f"csg_dashboard_backup_{timestamp}.db",
        media_type='application/octet-stream'
    )
177
+
178
+
179
@router.get("/database/stats")
def get_database_stats(admin: AdminUser, db: DbSession):
    """Get database statistics - dynamically lists all tables."""
    db_path = Path(settings.database_url.replace("sqlite:///", ""))

    # Count rows per table, discovering the table list at runtime.
    from sqlalchemy import inspect, text
    inspector = inspect(db.get_bind())
    tables = {}
    for table_name in sorted(inspector.get_table_names()):
        tables[table_name] = db.execute(
            text(f"SELECT COUNT(*) FROM {table_name}")
        ).scalar()

    return {
        'file_size': db_path.stat().st_size if db_path.exists() else 0,
        'tables': tables
    }
198
+
199
+
200
@router.post("/database/import")
async def import_database(
    file: UploadFile = File(...),
    admin: AdminUser = None,
    db: DbSession = None
):
    """
    Import/restore a database from a backup file.

    WARNING: This will replace ALL existing data!

    Only encrypted (SQLCipher) backups are accepted; plain SQLite files are
    rejected below. On success the previous database is kept next to the new
    one as a timestamped pre-import backup, and the app must be restarted to
    reopen the replaced file.
    """
    # Validate file type
    if not file.filename or not file.filename.lower().endswith('.db'):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Only .db files are allowed"
        )

    # Read file content
    content = await file.read()

    # Validate file size (must be at least 100 bytes for a valid SQLCipher db)
    if len(content) < 100:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="File too small to be a valid database"
        )

    # Only accept encrypted (SQLCipher) databases
    # Unencrypted SQLite files start with "SQLite format 3" - reject these
    if content.startswith(b'SQLite format 3'):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Unencrypted database not allowed. Only encrypted (SQLCipher) backups can be imported."
        )

    # Get database path
    db_path = Path(settings.database_url.replace("sqlite:///", ""))

    # Create backup of current database
    backup_path = db_path.parent / f"pre_import_backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}.db"
    if db_path.exists():
        shutil.copy(str(db_path), str(backup_path))

    # Close current connection (this is important!)
    # Note: In production, you'd want to properly close all connections
    db.close()

    try:
        # Write new database
        with open(db_path, 'wb') as f:
            f.write(content)

        return {
            "message": "Database imported successfully",
            "backup_created": str(backup_path),
            "imported_size": len(content),
            "restart_required": True
        }

    except Exception as e:
        # Restore backup on error
        if backup_path.exists():
            shutil.copy(str(backup_path), str(db_path))
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Import failed: {str(e)}. Original database restored."
        )
backend/api/sensors.py ADDED
@@ -0,0 +1,97 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime, timedelta
2
+ from fastapi import APIRouter, HTTPException, Query
3
+ from backend.core.dependencies import DbSession, AccessContext
4
+ from backend.schemas.sensor import SensorDataResponse, SensorDataPoint
5
+ from backend.models import SensorData, Site
6
+
7
+ router = APIRouter()
8
+
9
+
10
@router.get("/data", response_model=SensorDataResponse)
def get_sensor_data(
    db: DbSession,
    access: AccessContext,
    sites: list[str] = Query(..., description="Site codes to query"),
    parameters: list[str] = Query(..., description="Parameter names to retrieve"),
    start: datetime | None = Query(None, description="Start datetime"),
    end: datetime | None = Query(None, description="End datetime")
):
    """Get sensor data for specified sites and parameters.

    Access to every requested site is enforced first; the date range defaults
    to the last 7 days when not supplied. Requested parameters missing from a
    record's JSON payload come back as None.
    """
    # Validate user has access to all requested sites before touching data.
    for site_code in sites:
        access.require_site_access(site_code=site_code)

    # Default to last 7 days if no range specified.
    if end is None:
        end = datetime.utcnow()
    if start is None:
        start = end - timedelta(days=7)

    # Resolve site codes to ids; unknown codes are silently dropped here.
    site_records = db.query(Site).filter(Site.site_code.in_(sites)).all()
    site_id_map = {s.site_code: s.id for s in site_records}
    # Reverse lookup (id -> code) so each record resolves in O(1) instead of
    # scanning the forward map with next() per record.
    id_to_code = {sid: code for code, sid in site_id_map.items()}
    site_ids = list(site_id_map.values())

    # Query sensor data in chronological order.
    query = db.query(SensorData).filter(
        SensorData.site_id.in_(site_ids),
        SensorData.timestamp >= start,
        SensorData.timestamp <= end
    ).order_by(SensorData.timestamp)

    data_points = []
    for record in query.all():
        site_code = id_to_code.get(record.site_id)
        if not site_code:
            continue

        # Extract only the requested parameters from the JSON payload.
        values = {param: record.data.get(param) for param in parameters}

        data_points.append(SensorDataPoint(
            timestamp=record.timestamp,
            site_code=site_code,
            values=values
        ))

    return SensorDataResponse(
        data=data_points,
        sites=sites,
        parameters=parameters,
        count=len(data_points)
    )
69
+
70
+
71
@router.get("/latest")
def get_latest_data(
    db: DbSession,
    access: AccessContext,
    sites: list[str] = Query(..., description="Site codes to query")
):
    """Get the most recent data point for each site."""
    # Every requested site must be visible to the caller.
    for site_code in sites:
        access.require_site_access(site_code=site_code)

    matched_sites = db.query(Site).filter(Site.site_code.in_(sites)).all()

    results = {}
    for site in matched_sites:
        newest = (
            db.query(SensorData)
            .filter(SensorData.site_id == site.id)
            .order_by(SensorData.timestamp.desc())
            .first()
        )
        if newest:
            results[site.site_code] = {
                "timestamp": newest.timestamp,
                "data": newest.data
            }
    return results
backend/api/sites.py ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, HTTPException, status
2
+ from backend.core.dependencies import DbSession, AccessContext
3
+ from backend.schemas.site import SiteResponse, CropResponse, EquipmentGroupResponse, ParameterResponse
4
+ from backend.models import Site, Crop, EquipmentGroup, Parameter
5
+
6
+ router = APIRouter()
7
+
8
+
9
@router.get("", response_model=list[SiteResponse])
def list_sites(db: DbSession, access: AccessContext):
    """List the sites visible to the current user.

    Admins see every active site; other users only see sites granted
    through their group memberships.
    """
    if access.is_admin:
        rows = db.query(Site).filter(Site.is_active == True).all()
    elif access.site_ids:
        rows = db.query(Site).filter(Site.id.in_(access.site_ids)).all()
    else:
        return []

    def _to_response(site):
        # Flatten the crop relationship into a display name for the client.
        return SiteResponse(
            id=site.id,
            site_code=site.site_code,
            name=site.name,
            crop_id=site.crop_id,
            crop_name=site.crop.display_name if site.crop else None,
            latitude=site.latitude,
            longitude=site.longitude,
            is_active=site.is_active,
        )

    return [_to_response(s) for s in rows]
32
+
33
+
34
@router.get("/crops", response_model=list[CropResponse])
def list_crops(db: DbSession, access: AccessContext):
    """List crops reachable through the user's accessible sites."""
    if access.is_admin:
        rows = db.query(Crop).all()
    elif access.crop_ids:
        rows = db.query(Crop).filter(Crop.id.in_(access.crop_ids)).all()
    else:
        # No accessible crops at all — avoid the query entirely.
        return []
    return [CropResponse.model_validate(row) for row in rows]
45
+
46
+
47
@router.get("/equipment-groups", response_model=list[EquipmentGroupResponse])
def list_equipment_groups(db: DbSession, access: AccessContext, crop_id: str | None = None):
    """List equipment groups, optionally narrowed to a single crop.

    Non-admin users only see groups for crops they can access; asking
    for an inaccessible crop yields an empty list rather than a 403.
    """
    if access.is_admin:
        query = db.query(EquipmentGroup)
    elif access.crop_ids:
        query = db.query(EquipmentGroup).filter(EquipmentGroup.crop_id.in_(access.crop_ids))
    else:
        return []

    if crop_id:
        if not access.is_admin and crop_id not in access.crop_ids:
            return []
        query = query.filter(EquipmentGroup.crop_id == crop_id)

    return [EquipmentGroupResponse.model_validate(g) for g in query.all()]
65
+
66
+
67
@router.get("/parameters", response_model=list[ParameterResponse])
def list_parameters(
    db: DbSession,
    access: AccessContext,
    crop_id: str | None = None,
    equipment_group_id: str | None = None
):
    """List parameters visible to the user, with optional crop and
    equipment-group filters."""
    if access.is_admin:
        query = db.query(Parameter)
    elif access.crop_ids:
        query = db.query(Parameter).filter(Parameter.crop_id.in_(access.crop_ids))
    else:
        return []

    if crop_id:
        # Requesting a crop outside the user's scope yields an empty list.
        if not access.is_admin and crop_id not in access.crop_ids:
            return []
        query = query.filter(Parameter.crop_id == crop_id)

    if equipment_group_id:
        query = query.filter(Parameter.equipment_group_id == equipment_group_id)

    def _serialize(p):
        # Flatten the equipment-group relationship into a display name.
        return ParameterResponse(
            id=p.id,
            name=p.name,
            display_name=p.display_name,
            unit=p.unit,
            equipment_group_id=p.equipment_group_id,
            equipment_group_name=p.equipment_group.name if p.equipment_group else None,
            min_range=p.min_range,
            max_range=p.max_range,
        )

    return [_serialize(p) for p in query.all()]
105
+
106
+
107
@router.get("/{site_code}", response_model=SiteResponse)
def get_site(site_code: str, db: DbSession, access: AccessContext):
    """Fetch one site by its code.

    Raises 404 when the code is unknown, 403 when the site exists but
    is outside the user's groups.
    """
    site = db.query(Site).filter(Site.site_code == site_code).first()
    if site is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="Site not found"
        )

    # NOTE(review): existence is checked before authorization, so an
    # unauthorized caller can distinguish 404 from 403 — confirm this
    # enumeration behavior is intended.
    access.require_site_access(site_code=site_code)

    return SiteResponse(
        id=site.id,
        site_code=site.site_code,
        name=site.name,
        crop_id=site.crop_id,
        crop_name=site.crop.display_name if site.crop else None,
        latitude=site.latitude,
        longitude=site.longitude,
        is_active=site.is_active,
    )
backend/api/users.py ADDED
@@ -0,0 +1,227 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import APIRouter, HTTPException, status, Request
2
+ from backend.core.dependencies import DbSession, AdminUser
3
+ from backend.schemas.user import UserCreate, UserUpdate, UserResponse
4
+ from backend.services.auth import create_user, get_user_by_email
5
+ from backend.models import User, AuditLog
6
+
7
+ router = APIRouter()
8
+
9
+
10
@router.get("", response_model=list[UserResponse])
def list_users(db: DbSession, admin: AdminUser):
    """Return every user account (admin only)."""
    def _serialize(account):
        return UserResponse(
            id=account.id,
            email=account.email,
            full_name=account.full_name,
            is_admin=account.is_admin,
            is_active=account.is_active,
            created_at=account.created_at,
            last_login=account.last_login,
            groups=[membership.group.name for membership in account.groups],
        )

    return [_serialize(u) for u in db.query(User).all()]
26
+
27
+
28
@router.post("", response_model=UserResponse, status_code=status.HTTP_201_CREATED)
def create_new_user(user_data: UserCreate, db: DbSession, admin: AdminUser, request: Request):
    """Create a user account (admin only) and record an audit entry."""
    if get_user_by_email(db, user_data.email) is not None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="User with this email already exists"
        )

    new_user = create_user(
        db,
        email=user_data.email,
        password=user_data.password,
        full_name=user_data.full_name,
        is_admin=user_data.is_admin,
    )

    # Record who created the account and from where.
    db.add(AuditLog(
        user_id=admin.id,
        action="user_created",
        resource_type="user",
        resource_id=new_user.id,
        details={"email": new_user.email, "full_name": new_user.full_name, "is_admin": new_user.is_admin},
        ip_address=request.client.host if request.client else None,
    ))
    db.commit()

    return UserResponse(
        id=new_user.id,
        email=new_user.email,
        full_name=new_user.full_name,
        is_admin=new_user.is_admin,
        is_active=new_user.is_active,
        created_at=new_user.created_at,
        groups=[],
    )
65
+
66
+
67
@router.get("/{user_id}", response_model=UserResponse)
def get_user(user_id: str, db: DbSession, admin: AdminUser):
    """Fetch a single user by id (admin only); 404 when unknown."""
    account = db.query(User).filter(User.id == user_id).first()
    if account is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found"
        )
    return UserResponse(
        id=account.id,
        email=account.email,
        full_name=account.full_name,
        is_admin=account.is_admin,
        is_active=account.is_active,
        created_at=account.created_at,
        last_login=account.last_login,
        groups=[m.group.name for m in account.groups],
    )
85
+
86
+
87
@router.put("/{user_id}", response_model=UserResponse)
def update_user(user_id: str, user_data: UserUpdate, db: DbSession, admin: AdminUser, request: Request):
    """Update a user account (admin only).

    Guards: an admin account cannot be deactivated directly, and the
    last active admin cannot lose admin privileges.  Sensitive changes
    (password, admin flag, active flag) each get a dedicated audit
    entry; email/name edits are captured in a general update entry.
    """
    target = db.query(User).filter(User.id == user_id).first()
    if target is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found"
        )

    previous_admin = target.is_admin
    previous_active = target.is_active
    field_changes = {}

    # Safety rails around admin accounts.
    if target.is_admin:
        if user_data.is_active is not None and not user_data.is_active:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Cannot deactivate an admin account. Remove admin privileges first."
            )
        if user_data.is_admin is not None and not user_data.is_admin:
            # Refuse to demote the last remaining active admin.
            active_admins = db.query(User).filter(User.is_admin == True, User.is_active == True).count()
            if active_admins <= 1:
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail="Cannot remove admin privileges from the last admin account."
                )

    if user_data.email is not None and user_data.email != target.email:
        field_changes["email"] = {"old": target.email, "new": user_data.email}
        target.email = user_data.email
    if user_data.full_name is not None and user_data.full_name != target.full_name:
        field_changes["full_name"] = {"old": target.full_name, "new": user_data.full_name}
        target.full_name = user_data.full_name
    if user_data.is_admin is not None:
        target.is_admin = user_data.is_admin
    if user_data.is_active is not None:
        target.is_active = user_data.is_active
    if user_data.password is not None:
        from backend.core.security import get_password_hash
        target.password_hash = get_password_hash(user_data.password)
        target.token_version += 1  # Invalidate all existing tokens

    db.commit()
    db.refresh(target)

    client_ip = request.client.host if request.client else None

    def _log(action, details):
        # One audit row per security-relevant event.
        db.add(AuditLog(
            user_id=admin.id,
            action=action,
            resource_type="user",
            resource_id=target.id,
            details=details,
            ip_address=client_ip,
        ))

    if user_data.password is not None:
        _log("password_changed", {"target_email": target.email, "changed_by": admin.email})

    if user_data.is_admin is not None and previous_admin != target.is_admin:
        _log("admin_granted" if target.is_admin else "admin_revoked",
             {"target_email": target.email})

    if user_data.is_active is not None and previous_active != target.is_active:
        _log("user_activated" if target.is_active else "user_deactivated",
             {"target_email": target.email})

    if field_changes:
        _log("user_updated", {"target_email": target.email, "changes": field_changes})

    db.commit()

    return UserResponse(
        id=target.id,
        email=target.email,
        full_name=target.full_name,
        is_admin=target.is_admin,
        is_active=target.is_active,
        created_at=target.created_at,
        last_login=target.last_login,
        groups=[m.group.name for m in target.groups],
    )
197
+
198
+
199
@router.delete("/{user_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_user(user_id: str, db: DbSession, admin: AdminUser, request: Request):
    """Delete a non-admin user (admin only), auditing the removal."""
    target = db.query(User).filter(User.id == user_id).first()
    if target is None:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail="User not found"
        )

    # Admin accounts must be demoted before they can be removed.
    if target.is_admin:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Cannot delete an admin account. Remove admin privileges first."
        )

    # Write the audit entry first so the deleted identity is preserved.
    db.add(AuditLog(
        user_id=admin.id,
        action="user_deleted",
        resource_type="user",
        resource_id=target.id,
        details={"email": target.email, "full_name": target.full_name},
        ip_address=request.client.host if request.client else None,
    ))
    db.delete(target)
    db.commit()
backend/config.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from pathlib import Path
3
+ from pydantic_settings import BaseSettings
4
+ from functools import lru_cache
5
+
6
+
7
class Settings(BaseSettings):
    """Application configuration, loaded from the environment / .env file."""

    # --- Application ---
    app_name: str = "CSG Flux Dashboard"
    debug: bool = False

    # Demo deployment flag (HuggingFace): synthetic data, reset on restart.
    demo_mode: bool = True

    # --- Security / JWT ---
    secret_key: str = "dev-secret-key-change-in-production-min-32-chars"
    refresh_secret_key: str = "dev-refresh-secret-key-change-in-production-min-32-chars"
    algorithm: str = "HS256"
    access_token_expire_minutes: int = 30  # 30 minutes
    refresh_token_expire_days: int = 7  # 7 days

    # --- Database ---
    # DATABASE_URL env var overrides this (set in the Dockerfile for HF Spaces).
    database_url: str = "sqlite:///./data/crop_dashboard.db"
    db_encryption_key: str | None = None  # SQLCipher key; plaintext DB when unset

    # --- Paths ---
    base_dir: Path = Path(__file__).parent.parent

    @property
    def is_hf_spaces(self) -> bool:
        """True when a writable /data volume exists (HuggingFace Spaces)."""
        return os.path.exists("/data") and os.access("/data", os.W_OK)

    @property
    def data_dir(self) -> Path:
        return Path("/data") if self.is_hf_spaces else self.base_dir / "data"

    @property
    def uploads_dir(self) -> Path:
        return Path("/data/uploads") if self.is_hf_spaces else self.base_dir / "uploads"

    @property
    def archives_dir(self) -> Path:
        return Path("/data/archives") if self.is_hf_spaces else self.base_dir / "archives"

    # --- Bootstrap admin (created on first run) ---
    # ADMIN_PASSWORD must come from an env var / secret for security.
    admin_email: str = "admin@cropdash.dev"
    admin_password: str = ""  # Set via ADMIN_PASSWORD env var

    # --- CORS ---
    cors_origins: list[str] = ["http://localhost:5173", "http://127.0.0.1:5173"]

    @property
    def all_cors_origins(self) -> list[str]:
        """Configured CORS origins plus the HF Spaces URL when deployed."""
        origins = list(self.cors_origins)
        # SPACE_ID has the form "username/space-name" when on HF Spaces.
        space_id = os.environ.get("SPACE_ID", "")
        if "/" in space_id:
            username, space_name = space_id.split("/", 1)
            hf_url = f"https://{username}-{space_name}.hf.space"
            if hf_url not in origins:
                origins.append(hf_url)
        # Always include the known HF Spaces URL.
        hf_space_url = "https://richtext-crop-dashboard-platform.hf.space"
        if hf_space_url not in origins:
            origins.append(hf_space_url)
        return origins

    # --- Box OAuth (credentials from https://app.box.com/developers/console) ---
    box_client_id: str = ""
    box_client_secret: str = ""
    box_redirect_uri: str = "http://localhost:5173/admin/box/callback"

    class Config:
        env_file = ".env"
        env_file_encoding = "utf-8"

    @classmethod
    def parse_cors_origins(cls, v):
        """Split a comma-separated CORS_ORIGINS string into a list.

        NOTE(review): this helper is never registered as a pydantic
        validator, so it does not run automatically — confirm whether
        CORS_ORIGINS env parsing actually needs it wired up.
        """
        if isinstance(v, str):
            return [origin.strip() for origin in v.split(',') if origin.strip()]
        return v
91
+
92
+
93
@lru_cache()
def get_settings() -> Settings:
    """Return the process-wide, cached Settings instance."""
    return Settings()
96
+
97
+
98
+ settings = get_settings()
99
+
100
+ # Ensure directories exist (use try/except for permission issues)
101
+ try:
102
+ settings.data_dir.mkdir(parents=True, exist_ok=True)
103
+ settings.uploads_dir.mkdir(parents=True, exist_ok=True)
104
+ settings.archives_dir.mkdir(parents=True, exist_ok=True)
105
+ except PermissionError:
106
+ pass # Directories may already exist or be managed by the platform
backend/core/__init__.py ADDED
File without changes
backend/core/access_control.py ADDED
@@ -0,0 +1,165 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Centralized Access Control Module
3
+
4
+ This module provides a single source of truth for all access control logic.
5
+ It computes a user's accessible resources ONCE per request and caches them
6
+ in a UserAccessContext object that can be injected into any endpoint.
7
+
8
+ Usage in endpoints:
9
+ from backend.core.dependencies import DbSession, AccessContext
10
+
11
+ @router.get("/data")
12
+ def get_data(access: AccessContext, db: DbSession):
13
+ if not access.is_admin:
14
+ # Filter by user's accessible sites
15
+ query = query.filter(Model.site_id.in_(access.site_ids))
16
+ return query.all()
17
+
18
+ Adding new data types:
19
+ 1. If data is site-scoped: use access.site_ids or access.site_codes
20
+ 2. If data is crop-scoped: use access.crop_ids
21
+ 3. If data needs new scope: add new field to UserAccessContext
22
+ """
23
+
24
+ from dataclasses import dataclass
25
+ from typing import Annotated
26
+ from fastapi import Depends, HTTPException, status
27
+ from sqlalchemy.orm import Session
28
+
29
+ from backend.database import get_db
30
+ from backend.core.dependencies import get_current_user
31
+ from backend.models import User, UserGroup, GroupSite, Site
32
+
33
+
34
+ @dataclass
35
+ class UserAccessContext:
36
+ """
37
+ Cached access context for a user request.
38
+
39
+ This object is created ONCE per request and contains all the information
40
+ needed to filter data by the user's group memberships.
41
+
42
+ Attributes:
43
+ user: The authenticated User object
44
+ is_admin: Whether user has admin privileges (bypasses all filters)
45
+ site_ids: Set of Site.id values the user can access
46
+ site_codes: Set of Site.site_code values the user can access
47
+ crop_ids: Set of Crop.id values from the user's accessible sites
48
+ """
49
+ user: User
50
+ is_admin: bool
51
+ site_ids: set[str]
52
+ site_codes: set[str]
53
+ crop_ids: set[str]
54
+
55
+ def has_site_access(self, site_id: str = None, site_code: str = None) -> bool:
56
+ """Check if user has access to a specific site."""
57
+ if self.is_admin:
58
+ return True
59
+ if site_id:
60
+ return site_id in self.site_ids
61
+ if site_code:
62
+ return site_code in self.site_codes
63
+ return False
64
+
65
+ def has_crop_access(self, crop_id: str) -> bool:
66
+ """Check if user has access to a specific crop (via their sites)."""
67
+ if self.is_admin:
68
+ return True
69
+ return crop_id in self.crop_ids
70
+
71
+ def require_site_access(self, site_id: str = None, site_code: str = None) -> None:
72
+ """Raise 403 if user doesn't have access to the site."""
73
+ if not self.has_site_access(site_id=site_id, site_code=site_code):
74
+ raise HTTPException(
75
+ status_code=status.HTTP_403_FORBIDDEN,
76
+ detail="You don't have access to this site"
77
+ )
78
+
79
+ def require_crop_access(self, crop_id: str) -> None:
80
+ """Raise 403 if user doesn't have access to the crop."""
81
+ if not self.has_crop_access(crop_id):
82
+ raise HTTPException(
83
+ status_code=status.HTTP_403_FORBIDDEN,
84
+ detail="You don't have access to this crop"
85
+ )
86
+
87
+
88
def get_access_context(
    user: Annotated[User, Depends(get_current_user)],
    db: Annotated[Session, Depends(get_db)]
) -> UserAccessContext:
    """FastAPI dependency building the per-request access context.

    Admins get a context over all active sites.  Regular users get the
    active sites reachable through their group memberships; users with
    no groups (or groups with no sites) get an empty context.
    """
    def _context(is_admin, sites):
        # Derive all three lookup sets from one list of Site rows.
        return UserAccessContext(
            user=user,
            is_admin=is_admin,
            site_ids={s.id for s in sites},
            site_codes={s.site_code for s in sites},
            crop_ids={s.crop_id for s in sites if s.crop_id},
        )

    if user.is_admin:
        active_sites = db.query(Site).filter(Site.is_active == True).all()
        return _context(True, active_sites)

    # Step 1: the user's group memberships.
    memberships = db.query(UserGroup).filter(UserGroup.user_id == user.id).all()
    group_ids = [m.group_id for m in memberships]
    if not group_ids:
        return _context(False, [])

    # Step 2: sites assigned to those groups.
    assignments = db.query(GroupSite).filter(GroupSite.group_id.in_(group_ids)).all()
    assigned_site_ids = [a.site_id for a in assignments]
    if not assigned_site_ids:
        return _context(False, [])

    # Step 3: load the actual (active) site rows.
    accessible = db.query(Site).filter(
        Site.id.in_(assigned_site_ids),
        Site.is_active == True
    ).all()
    return _context(False, accessible)
162
+
163
+
164
+ # Type alias for dependency injection - use this in endpoint signatures
165
+ AccessContext = Annotated[UserAccessContext, Depends(get_access_context)]
backend/core/dependencies.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Annotated
2
+ from fastapi import Depends, HTTPException, status
3
+ from fastapi.security import OAuth2PasswordBearer
4
+ from sqlalchemy.orm import Session
5
+ from backend.database import get_db
6
+ from backend.core.security import verify_token
7
+ from backend.models import User
8
+
9
+ oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/auth/login")
10
+
11
+
12
def get_current_user(
    token: Annotated[str, Depends(oauth2_scheme)],
    db: Annotated[Session, Depends(get_db)]
) -> User:
    """Resolve a bearer token to an active User.

    Raises 401 for invalid/expired/revoked tokens or unknown users,
    and 403 for disabled accounts.
    """
    def _unauthorized(reason):
        return HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail=reason,
            headers={"WWW-Authenticate": "Bearer"},
        )

    payload = verify_token(token)
    if payload is None:
        raise _unauthorized("Invalid or expired token")

    user_id = payload.get("sub")
    if not user_id or not isinstance(user_id, str):
        raise _unauthorized("Invalid token payload")

    user = db.query(User).filter(User.id == user_id).first()
    if user is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="User not found",
        )

    # Token-version mismatch means a password change / logout revoked it.
    token_version = payload.get("token_version")
    if token_version is None or token_version != user.token_version:
        raise _unauthorized("Token has been revoked")

    if not user.is_active:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="User account is disabled",
        )

    return user
56
+
57
+
58
def get_current_admin_user(
    current_user: Annotated[User, Depends(get_current_user)]
) -> User:
    """Dependency narrowing the authenticated user to admins (403 otherwise)."""
    if current_user.is_admin:
        return current_user
    raise HTTPException(
        status_code=status.HTTP_403_FORBIDDEN,
        detail="Admin access required",
    )
67
+
68
+
69
+ # Type aliases for cleaner route signatures
70
+ CurrentUser = Annotated[User, Depends(get_current_user)]
71
+ AdminUser = Annotated[User, Depends(get_current_admin_user)]
72
+ DbSession = Annotated[Session, Depends(get_db)]
73
+
74
+ # Re-export access control for convenience
75
+ from backend.core.access_control import AccessContext, UserAccessContext
backend/core/security.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from datetime import datetime, timedelta, timezone
from typing import Any

from jose import jwt, JWTError
from passlib.context import CryptContext

from backend.config import settings
6
+
7
+ pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
8
+
9
+
10
def create_access_token(data: dict[str, Any], expires_delta: timedelta | None = None) -> str:
    """Create a signed JWT access token.

    Args:
        data: Claims to embed (e.g. ``sub``, ``token_version``).
        expires_delta: Optional custom lifetime; defaults to the
            configured ``access_token_expire_minutes``.

    Returns:
        The encoded JWT string.
    """
    to_encode = data.copy()
    # Timezone-aware now() replaces the deprecated datetime.utcnow().
    lifetime = expires_delta or timedelta(minutes=settings.access_token_expire_minutes)
    to_encode["exp"] = datetime.now(timezone.utc) + lifetime
    return jwt.encode(to_encode, settings.secret_key, algorithm=settings.algorithm)
19
+
20
+
21
def verify_token(token: str) -> dict[str, Any] | None:
    """Decode an access token; return its claims, or None when invalid/expired."""
    try:
        return jwt.decode(token, settings.secret_key, algorithms=[settings.algorithm])
    except JWTError:
        return None
27
+
28
+
29
def create_refresh_token(data: dict[str, Any], expires_delta: timedelta | None = None) -> str:
    """Create a refresh token with a longer lifetime and a separate secret.

    Args:
        data: Claims to embed; a ``type: "refresh"`` marker is added so
            refresh tokens cannot be replayed as access tokens.
        expires_delta: Optional custom lifetime; defaults to the
            configured ``refresh_token_expire_days``.

    Returns:
        The encoded JWT string, signed with ``refresh_secret_key``.
    """
    to_encode = data.copy()
    # Timezone-aware now() replaces the deprecated datetime.utcnow().
    lifetime = expires_delta or timedelta(days=settings.refresh_token_expire_days)
    to_encode.update({"exp": datetime.now(timezone.utc) + lifetime, "type": "refresh"})
    return jwt.encode(to_encode, settings.refresh_secret_key, algorithm=settings.algorithm)
39
+
40
+
41
def verify_refresh_token(token: str) -> dict[str, Any] | None:
    """Decode a refresh token; returns claims only for tokens of type 'refresh'."""
    try:
        claims = jwt.decode(token, settings.refresh_secret_key, algorithms=[settings.algorithm])
    except JWTError:
        return None
    return claims if claims.get("type") == "refresh" else None
50
+
51
+
52
def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against its stored bcrypt hash."""
    return pwd_context.verify(plain_password, hashed_password)
54
+
55
+
56
def get_password_hash(password: str) -> str:
    """Hash a plaintext password with bcrypt for storage."""
    return pwd_context.hash(password)
backend/database.py ADDED
@@ -0,0 +1,144 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Database module with SQLCipher encryption support.
3
+
4
+ When DB_ENCRYPTION_KEY is set in the environment (or .env file), the database
5
+ will be encrypted using SQLCipher. If no key is provided, a standard SQLite
6
+ database is used (for development only).
7
+ """
8
+ import os
9
+ from sqlalchemy import create_engine, event, text
10
+ from sqlalchemy.orm import sessionmaker, DeclarativeBase
11
+ from backend.config import settings
12
+
13
+ # Check if SQLCipher is available
14
+ SQLCIPHER_AVAILABLE = False
15
+ try:
16
+ import sqlcipher3
17
+ SQLCIPHER_AVAILABLE = True
18
+ except ImportError:
19
+ pass
20
+
21
+
22
def _get_db_path() -> str:
    """Resolve the on-disk SQLite file path for the configured database.

    On HuggingFace Spaces (writable /data volume) the persistent path is
    used; otherwise the path is parsed out of ``settings.database_url``.
    The parent directory is created best-effort.
    """
    from pathlib import Path

    # HF Spaces exposes a writable /data volume for persistence.
    # (os is already imported at module level — no local re-import needed.)
    if os.path.exists("/data") and os.access("/data", os.W_OK):
        db_path = "/data/crop_dashboard.db"
    else:
        url = settings.database_url
        if url.startswith("sqlite:///"):
            # Strip the scheme and any leading "./" to get a plain path.
            db_path = url.removeprefix("sqlite:///").removeprefix("./")
        else:
            db_path = url

    # Ensure the containing directory exists; tolerate platform-managed dirs.
    try:
        Path(db_path).parent.mkdir(parents=True, exist_ok=True)
    except PermissionError:
        pass  # Directory may already exist / be managed by the platform

    return db_path
51
+
52
+
53
def _create_encrypted_engine():
    """Create a SQLAlchemy engine backed by an encrypted SQLCipher database.

    Returns:
        A SQLAlchemy Engine whose connections are opened via sqlcipher3
        with the configured encryption key and SQLCipher 4 hardening.

    Raises:
        RuntimeError: if the sqlcipher3 package is not installed.
    """
    if not SQLCIPHER_AVAILABLE:
        raise RuntimeError(
            "SQLCipher encryption requested but sqlcipher3 is not installed. "
            "Install with: pip install sqlcipher3"
        )

    db_path = _get_db_path()
    # PRAGMA key cannot be parameterized; escape embedded single quotes so
    # a key containing ' cannot break out of the SQL string literal.
    key = settings.db_encryption_key.replace("'", "''")

    def create_connection():
        """Open a new SQLCipher connection with the key and hardening PRAGMAs."""
        conn = sqlcipher3.connect(db_path, check_same_thread=False)
        conn.execute(f"PRAGMA key = '{key}'")
        # SQLCipher 4 defaults for strong encryption.
        conn.execute("PRAGMA cipher_compatibility = 4")
        conn.execute("PRAGMA kdf_iter = 256000")
        conn.execute("PRAGMA cipher_memory_security = ON")
        return conn

    # SQLite with a creator function uses StaticPool rather than QueuePool.
    from sqlalchemy.pool import StaticPool
    return create_engine(
        "sqlite://",  # Dummy URL; the creator supplies the real connection
        creator=create_connection,
        poolclass=StaticPool,
    )
85
+
86
+
87
def _create_standard_engine():
    """Create a plain (unencrypted) SQLite engine from the configured URL."""
    return create_engine(
        settings.database_url,
        connect_args={"check_same_thread": False},
    )
94
+
95
+
96
def create_db_engine():
    """Build the database engine.

    Uses SQLCipher encryption when ``db_encryption_key`` is configured,
    otherwise falls back to a standard (plaintext) SQLite engine.
    """
    if settings.db_encryption_key:
        print("Database encryption enabled (SQLCipher)")
        return _create_encrypted_engine()
    print("WARNING: Database is NOT encrypted (no DB_ENCRYPTION_KEY set)")
    return _create_standard_engine()
109
+
110
+
111
+ # Create the engine
112
+ engine = create_db_engine()
113
+
114
+ SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
115
+
116
+
117
+ class Base(DeclarativeBase):
118
+ pass
119
+
120
+
121
def get_db():
    """FastAPI dependency yielding a session that is always closed."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
127
+
128
+
129
def init_db():
    """Create all tables; model modules are imported so their tables register."""
    from backend.models import user, group, site, sensor_data, pipeline, box_connection  # noqa: F401
    Base.metadata.create_all(bind=engine)
132
+
133
+
134
+ def is_database_encrypted(db_path: str | None = None) -> bool:
135
+ """Check if the database file is encrypted."""
136
+ path = db_path or _get_db_path()
137
+ if not os.path.exists(path):
138
+ return False
139
+
140
+ with open(path, 'rb') as f:
141
+ header = f.read(16)
142
+
143
+ # SQLite files start with "SQLite format 3\0"
144
+ return header != b'SQLite format 3\x00'
backend/main.py ADDED
@@ -0,0 +1,441 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from contextlib import asynccontextmanager
3
+ from fastapi import FastAPI
4
+ from fastapi.middleware.cors import CORSMiddleware
5
+ from fastapi.staticfiles import StaticFiles
6
+ from fastapi.responses import FileResponse
7
+ from pathlib import Path
8
+
9
+ from backend.config import settings
10
+ from backend.database import init_db, engine
11
+ from backend.api import api_router
12
+
13
+
14
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan hook.

    Startup: in demo mode, delete the SQLite file so every restart begins
    from a clean slate; then create tables, seed data, and start the Box
    sync scheduler when one is configured. Shutdown: best-effort stop of
    the scheduler.
    """
    if settings.demo_mode:
        print("=" * 50)
        print(" RUNNING IN DEMO MODE")
        print(" Data will reset on restart")
        print("=" * 50)
        # Demo mode always recreates the database from scratch.
        demo_db = Path(settings.database_url.replace("sqlite:///", ""))
        if demo_db.exists():
            demo_db.unlink()
            print(f"Deleted existing database: {demo_db}")

    init_db()
    seed_initial_data()

    # Start the background Box scheduler if a connection exists; a failure
    # here must not prevent the app from serving requests.
    from backend.services.box_worker import initialize_box_scheduler, stop_scheduler
    try:
        initialize_box_scheduler()
    except Exception as e:
        print(f"Warning: Failed to initialize Box scheduler: {e}")

    yield

    # Shutdown: ignore scheduler errors — the process is exiting anyway.
    try:
        stop_scheduler()
    except Exception:
        pass
45
+
46
+
47
# FastAPI application; table creation / seeding happens in the lifespan hook.
app = FastAPI(
    title=settings.app_name,
    description="Agricultural sensor data dashboard for crop monitoring",
    version="1.0.0",
    lifespan=lifespan
)

# CORS middleware - use all_cors_origins to include HF Spaces URL when deployed
app.add_middleware(
    CORSMiddleware,
    allow_origins=settings.all_cors_origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# All JSON API routes live under /api; the SPA catch-all is registered later.
app.include_router(api_router, prefix="/api")
65
+
66
+
67
def seed_initial_data():
    """Seed the database with the initial admin user and demo sample data.

    Idempotently creates: the admin account (password taken from the
    ADMIN_PASSWORD env var via settings), five crops, ten fake sites with
    randomized California coordinates, per-crop equipment groups and
    parameters, two grower groups (five sites each), two demo viewer users,
    and 30 days of synthetic sensor readings.

    All site names, locations, and sensor data are synthetic and not real.
    In non-demo mode the function is a no-op once any user exists.
    """
    # Fix: dropped unused `random` / `datetime` imports and the unused
    # SensorData name (only generate_fake_sensor_data needs those).
    from backend.database import SessionLocal
    from backend.models import User, Crop, Site, Group, GroupSite, EquipmentGroup, Parameter
    from backend.models import UserGroup
    from backend.services.auth import get_user_by_email, create_user

    db = SessionLocal()
    try:
        # Check if database is empty (for production mode)
        user_count = db.query(User).count()
        if user_count > 0 and not settings.demo_mode:
            print("Database already seeded, skipping...")
            return

        # Create admin user if not exists (password from env var)
        admin = get_user_by_email(db, settings.admin_email)
        if not admin and settings.admin_password:
            admin = create_user(
                db,
                email=settings.admin_email,
                password=settings.admin_password,
                full_name="System Administrator",
                is_admin=True
            )
            print(f"Created admin user: {settings.admin_email}")
        elif not settings.admin_password:
            print("Warning: ADMIN_PASSWORD not set, skipping admin user creation")

        # Create crops if not exist
        crops_data = [
            {"name": "almonds", "display_name": "Almonds", "color": "#3B82F6"},
            {"name": "grapes", "display_name": "Grapes", "color": "#8B5CF6"},
            {"name": "olives", "display_name": "Olives", "color": "#EF4444"},
            {"name": "pistachios", "display_name": "Pistachios", "color": "#22C55E"},
            {"name": "table_grapes", "display_name": "Table Grapes", "color": "#F59E0B"},
        ]

        crop_map = {}
        for crop_data in crops_data:
            crop = db.query(Crop).filter(Crop.name == crop_data["name"]).first()
            if not crop:
                crop = Crop(**crop_data)
                db.add(crop)
                db.commit()
                db.refresh(crop)
            crop_map[crop_data["name"]] = crop

        # Create 10 FAKE sites with randomized California locations
        # These are NOT real locations - just random points in California for demo purposes
        sites_data = [
            # Grower One sites (Site 01-05)
            {"site_code": "SITE_01", "name": "Demo Site 01 - North Valley", "crop": "almonds", "lat": 35.42, "lon": -119.85},
            {"site_code": "SITE_02", "name": "Demo Site 02 - Central Plains", "crop": "almonds", "lat": 36.15, "lon": -120.32},
            {"site_code": "SITE_03", "name": "Demo Site 03 - West Ridge", "crop": "grapes", "lat": 34.78, "lon": -118.95},
            {"site_code": "SITE_04", "name": "Demo Site 04 - South Basin", "crop": "olives", "lat": 35.89, "lon": -119.42},
            {"site_code": "SITE_05", "name": "Demo Site 05 - East Hills", "crop": "pistachios", "lat": 36.52, "lon": -118.78},
            # Grower Two sites (Site 06-10)
            {"site_code": "SITE_06", "name": "Demo Site 06 - River Delta", "crop": "grapes", "lat": 34.25, "lon": -119.15},
            {"site_code": "SITE_07", "name": "Demo Site 07 - Mountain View", "crop": "olives", "lat": 35.65, "lon": -120.88},
            {"site_code": "SITE_08", "name": "Demo Site 08 - Coastal Range", "crop": "table_grapes", "lat": 36.85, "lon": -119.55},
            {"site_code": "SITE_09", "name": "Demo Site 09 - Valley Floor", "crop": "almonds", "lat": 34.95, "lon": -118.62},
            {"site_code": "SITE_10", "name": "Demo Site 10 - Highland Farm", "crop": "pistachios", "lat": 35.35, "lon": -120.15},
        ]

        site_objects = {}
        for site_data in sites_data:
            site = db.query(Site).filter(Site.site_code == site_data["site_code"]).first()
            if not site:
                site = Site(
                    site_code=site_data["site_code"],
                    name=site_data["name"],
                    crop_id=crop_map[site_data["crop"]].id,
                    latitude=site_data["lat"],
                    longitude=site_data["lon"]
                )
                db.add(site)
                db.commit()
                db.refresh(site)
            site_objects[site_data["site_code"]] = site

        print(f"Created {len(site_objects)} demo sites")

        # Create equipment groups and parameters for each crop
        equipment_groups_data = [
            "Sonic Anemometer", "IRGASON", "Net Radiometer", "Soil Thermocouple",
            "Soil Moisture Probes", "Energy Budget", "EC100 Probe", "Infrared Sensor"
        ]

        # Parameters with their equipment group assignments
        parameters_data = [
            ("TA_1_1_1", "EC100 Probe", "Air Temperature", "C"),
            ("RH_1_1_1", "EC100 Probe", "Relative Humidity", "%"),
            ("VPD", "EC100 Probe", "Vapor Pressure Deficit", "kPa"),
            ("WS", "Sonic Anemometer", "Wind Speed", "m/s"),
            ("WD", "Sonic Anemometer", "Wind Direction", "deg"),
            ("USTAR", "Sonic Anemometer", "Friction Velocity", "m/s"),
            ("T_SONIC", "Sonic Anemometer", "Sonic Temperature", "C"),
            ("SW_IN", "Net Radiometer", "Incoming Shortwave", "W/m2"),
            ("SW_OUT", "Net Radiometer", "Outgoing Shortwave", "W/m2"),
            ("LW_IN", "Net Radiometer", "Incoming Longwave", "W/m2"),
            ("LW_OUT", "Net Radiometer", "Outgoing Longwave", "W/m2"),
            ("NETRAD", "Net Radiometer", "Net Radiation", "W/m2"),
            ("TS1_2cm", "Soil Thermocouple", "Soil Temp 2cm", "C"),
            ("TS1_6cm", "Soil Thermocouple", "Soil Temp 6cm", "C"),
            ("SWC_1_1_1", "Soil Moisture Probes", "Soil Water Content", "%"),
            ("H", "Energy Budget", "Sensible Heat Flux", "W/m2"),
            ("LE", "Energy Budget", "Latent Heat Flux", "W/m2"),
            ("G", "Energy Budget", "Ground Heat Flux", "W/m2"),
            ("FC_mass", "IRGASON", "CO2 Flux", "mg/m2/s"),
            ("ET", "IRGASON", "Evapotranspiration", "mm/hr"),
            ("CO2_density", "IRGASON", "CO2 Density", "mg/m3"),
            ("H2O_density", "IRGASON", "H2O Density", "g/m3"),
            ("T_CANOPY", "Infrared Sensor", "Canopy Temperature", "C"),
        ]

        for crop_name, crop in crop_map.items():
            # Create equipment groups for this crop
            eq_map = {}
            for eq_name in equipment_groups_data:
                existing = db.query(EquipmentGroup).filter(
                    EquipmentGroup.name == eq_name,
                    EquipmentGroup.crop_id == crop.id
                ).first()
                if not existing:
                    eq = EquipmentGroup(name=eq_name, crop_id=crop.id)
                    db.add(eq)
                    db.flush()  # flush (not commit) so eq.id is assigned
                    eq_map[eq_name] = eq.id
                else:
                    eq_map[eq_name] = existing.id

            # Create parameters for this crop
            for param_name, eq_name, display_name, unit in parameters_data:
                existing = db.query(Parameter).filter(
                    Parameter.name == param_name,
                    Parameter.crop_id == crop.id
                ).first()
                if not existing:
                    param = Parameter(
                        name=param_name,
                        display_name=display_name,
                        unit=unit,
                        crop_id=crop.id,
                        equipment_group_id=eq_map.get(eq_name)
                    )
                    db.add(param)
            db.commit()

        print("Created equipment groups and parameters for all crops")

        # Create "Grower One Farms" group (Sites 01-05)
        grower_one_group = db.query(Group).filter(Group.name == "Grower One Farms").first()
        if not grower_one_group:
            grower_one_group = Group(
                name="Grower One Farms",
                description="Demo group for Grower One with access to Sites 01-05"
            )
            db.add(grower_one_group)
            db.commit()
            db.refresh(grower_one_group)

            # Add sites 01-05 to Grower One group
            for code in ["SITE_01", "SITE_02", "SITE_03", "SITE_04", "SITE_05"]:
                if code in site_objects:
                    gs = GroupSite(group_id=grower_one_group.id, site_id=site_objects[code].id)
                    db.add(gs)
            db.commit()
            print("Created Grower One Farms group with 5 sites")

        # Create "Grower Two Farms" group (Sites 06-10)
        grower_two_group = db.query(Group).filter(Group.name == "Grower Two Farms").first()
        if not grower_two_group:
            grower_two_group = Group(
                name="Grower Two Farms",
                description="Demo group for Grower Two with access to Sites 06-10"
            )
            db.add(grower_two_group)
            db.commit()
            db.refresh(grower_two_group)

            # Add sites 06-10 to Grower Two group
            for code in ["SITE_06", "SITE_07", "SITE_08", "SITE_09", "SITE_10"]:
                if code in site_objects:
                    gs = GroupSite(group_id=grower_two_group.id, site_id=site_objects[code].id)
                    db.add(gs)
            db.commit()
            print("Created Grower Two Farms group with 5 sites")

        # Create Grower One user (demo-only credentials)
        grower1 = get_user_by_email(db, "grower1@demo.cropdash.dev")
        if not grower1:
            grower1 = create_user(
                db,
                email="grower1@demo.cropdash.dev",
                password="demo123",
                full_name="Grower One",
                is_admin=False
            )
            ug = UserGroup(user_id=grower1.id, group_id=grower_one_group.id, role="viewer")
            db.add(ug)
            db.commit()
            print("Created demo user: grower1@demo.cropdash.dev")

        # Create Grower Two user (demo-only credentials)
        grower2 = get_user_by_email(db, "grower2@demo.cropdash.dev")
        if not grower2:
            grower2 = create_user(
                db,
                email="grower2@demo.cropdash.dev",
                password="demo123",
                full_name="Grower Two",
                is_admin=False
            )
            ug = UserGroup(user_id=grower2.id, group_id=grower_two_group.id, role="viewer")
            db.add(ug)
            db.commit()
            print("Created demo user: grower2@demo.cropdash.dev")

        # Generate fake sensor data for all sites
        print("\nGenerating synthetic sensor data...")
        generate_fake_sensor_data(db, site_objects)

        print("\nDatabase seeded successfully with demo data")

    finally:
        db.close()
300
+
301
+
302
def generate_fake_sensor_data(db, site_objects: dict):
    """Populate 30 days of half-hourly synthetic readings for every site.

    Values are random (with a crude day/night cycle applied to radiation,
    temperature and flux variables) and do not represent real measurements.
    """
    import random
    from datetime import datetime, timedelta
    from backend.models import SensorData

    # Half-hourly records covering the trailing 30 days.
    end_time = datetime.now()
    start_time = end_time - timedelta(days=30)
    step = timedelta(minutes=30)

    # Plausible value range per parameter; used for sampling and clamping.
    param_ranges = {
        "TA_1_1_1": (10, 40),  # Air temp C
        "RH_1_1_1": (20, 95),  # Relative humidity %
        "VPD": (0.5, 4.0),  # Vapor pressure deficit kPa
        "WS": (0.1, 8.0),  # Wind speed m/s
        "WD": (0, 360),  # Wind direction deg
        "USTAR": (0.05, 0.8),  # Friction velocity m/s
        "T_SONIC": (10, 42),  # Sonic temp C
        "SW_IN": (0, 900),  # Incoming shortwave W/m2
        "SW_OUT": (0, 150),  # Outgoing shortwave W/m2
        "LW_IN": (250, 420),  # Incoming longwave W/m2
        "LW_OUT": (300, 550),  # Outgoing longwave W/m2
        "NETRAD": (-100, 700),  # Net radiation W/m2
        "TS1_2cm": (12, 35),  # Soil temp 2cm C
        "TS1_6cm": (14, 32),  # Soil temp 6cm C
        "SWC_1_1_1": (5, 55),  # Soil water content %
        "H": (-50, 350),  # Sensible heat W/m2
        "LE": (-20, 400),  # Latent heat W/m2
        "G": (-80, 200),  # Ground heat W/m2
        "FC_mass": (-1.5, 1.5),  # CO2 flux mg/m2/s
        "ET": (-0.2, 1.5),  # ET mm/hr
        "CO2_density": (680, 850),  # CO2 density mg/m3
        "H2O_density": (5, 18),  # H2O density g/m3
        "T_CANOPY": (8, 45),  # Canopy temp C
    }

    # Parameters that should follow a diurnal (day/night) cycle.
    diurnal_params = {"SW_IN", "NETRAD", "H", "LE", "ET", "T_CANOPY", "TA_1_1_1"}

    total_records = 0
    for site_code, site in site_objects.items():
        stamp = start_time
        pending = []
        rec_no = 1

        # Constant per-site bias so sites are distinguishable from each other.
        site_offset = random.uniform(-3, 3)

        while stamp <= end_time:
            hour = stamp.hour
            is_daytime = 6 <= hour <= 18

            readings = {}
            for param, (lo, hi) in param_ranges.items():
                base = random.uniform(lo, hi)

                if param in diurnal_params:
                    if is_daytime:
                        # Ramp up toward a midday peak.
                        hour_factor = 1 - abs(hour - 12) / 12
                        base = lo + (hi - lo) * (0.3 + 0.7 * hour_factor)
                    else:
                        base = lo + (hi - lo) * 0.1

                # Site bias plus gaussian noise, clamped back into range.
                value = base + site_offset + random.gauss(0, (hi - lo) * 0.05)
                readings[param] = round(max(lo, min(hi, value)), 4)

            pending.append(SensorData(
                site_id=site.id,
                timestamp=stamp,
                data=readings,
                record_number=rec_no,
            ))
            rec_no += 1

            # Flush in batches of 500 to bound memory use.
            if len(pending) >= 500:
                db.bulk_save_objects(pending)
                db.commit()
                total_records += len(pending)
                pending = []

            stamp += step

        # Flush whatever is left for this site.
        if pending:
            db.bulk_save_objects(pending)
            db.commit()
            total_records += len(pending)

        print(f" Generated data for {site_code}")

    print(f" Total synthetic records: {total_records}")
402
+
403
+
404
@app.get("/health")
def health_check():
    """Liveness probe: report app name and whether demo mode is active."""
    return {
        "status": "healthy",
        "app": settings.app_name,
        "demo_mode": settings.demo_mode,
    }
407
+
408
+
409
@app.get("/api/config/mode")
def get_config_mode():
    """Tell the frontend whether the backend is running in demo mode."""
    return {
        "demo_mode": settings.demo_mode,
    }
413
+
414
+
415
# Serve static files (frontend) if they exist - must be after all API routes
static_dir = Path(__file__).parent / "static"
if static_dir.exists():
    # Serve hashed build assets directly.
    assets_dir = static_dir / "assets"
    if assets_dir.exists():
        app.mount("/assets", StaticFiles(directory=assets_dir), name="assets")

    # Resolved root used to confine file serving to the static directory.
    _static_root = static_dir.resolve()

    # Catch-all route for SPA - must be last
    @app.get("/{full_path:path}")
    async def serve_spa(full_path: str):
        """Serve frontend SPA files, falling back to index.html.

        Security fix: the requested path is resolved and must stay inside
        the static directory, otherwise "../" segments in the catch-all
        path parameter could read arbitrary files on disk.
        """
        candidate = (static_dir / full_path).resolve()
        if candidate.is_relative_to(_static_root) and candidate.is_file():
            return FileResponse(candidate)
        # Return index.html for all other routes (SPA client-side routing).
        index_path = static_dir / "index.html"
        if index_path.exists():
            return FileResponse(index_path)
        # Fallback when no frontend build is present.
        return {"error": "Not found"}
437
+
438
+
439
if __name__ == "__main__":
    # Development entry point with auto-reload; production uses the
    # container's server command instead.
    import uvicorn
    uvicorn.run("backend.main:app", host="0.0.0.0", port=8000, reload=True)
backend/models/__init__.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from backend.models.user import User, UserGroup
2
+ from backend.models.group import Group
3
+ from backend.models.site import Crop, Site, GroupSite, EquipmentGroup, Parameter
4
+ from backend.models.sensor_data import SensorData
5
+ from backend.models.pipeline import FileArchive, PipelineConfig, AuditLog
6
+ from backend.models.box_connection import BoxConnection, BoxSyncLog
7
+
8
+ __all__ = [
9
+ "User", "UserGroup", "Group", "Crop", "Site", "GroupSite",
10
+ "EquipmentGroup", "Parameter", "SensorData", "FileArchive",
11
+ "PipelineConfig", "AuditLog", "BoxConnection", "BoxSyncLog"
12
+ ]
backend/models/box_connection.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Box cloud storage connection model."""
2
+ import uuid
3
+ from datetime import datetime
4
+ from sqlalchemy import String, DateTime, Integer, Boolean, Text, JSON
5
+ from sqlalchemy.orm import Mapped, mapped_column
6
+ from backend.database import Base
7
+
8
+
9
class BoxConnection(Base):
    """Store Box OAuth tokens and folder configuration.

    A single row describes one Box account link: its OAuth credentials,
    the staging/processed/backup folder choices, and the bookkeeping
    fields updated by the background sync and backup workers.
    """
    __tablename__ = "box_connections"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    name: Mapped[str] = mapped_column(String(100), default="Box Connection")

    # OAuth tokens (stored in plaintext here — should be encrypted in production)
    access_token: Mapped[str | None] = mapped_column(Text, nullable=True)
    refresh_token: Mapped[str | None] = mapped_column(Text, nullable=True)
    token_expires_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)

    # Box user info (cached from the Box API for display)
    box_user_id: Mapped[str | None] = mapped_column(String(50), nullable=True)
    box_user_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    box_user_email: Mapped[str | None] = mapped_column(String(255), nullable=True)

    # Folder configuration: files are picked up from "staging" and, once
    # imported, presumably moved to "processed" — confirm in box_worker.
    staging_folder_id: Mapped[str | None] = mapped_column(String(50), nullable=True)
    staging_folder_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    processed_folder_id: Mapped[str | None] = mapped_column(String(50), nullable=True)
    processed_folder_name: Mapped[str | None] = mapped_column(String(255), nullable=True)

    # Sync settings and last-run status
    is_active: Mapped[bool] = mapped_column(Boolean, default=False)
    sync_interval_minutes: Mapped[int] = mapped_column(Integer, default=60)
    last_sync: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    last_sync_status: Mapped[str | None] = mapped_column(String(50), nullable=True)  # success, error, in_progress
    last_sync_message: Mapped[str | None] = mapped_column(Text, nullable=True)
    files_processed_count: Mapped[int] = mapped_column(Integer, default=0)

    # Database backup settings
    backup_folder_id: Mapped[str | None] = mapped_column(String(50), nullable=True)
    backup_folder_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    backup_enabled: Mapped[bool] = mapped_column(Boolean, default=False)
    backup_schedule: Mapped[str | None] = mapped_column(String(50), nullable=True)  # manual, daily, weekly
    backup_time: Mapped[str | None] = mapped_column(String(10), nullable=True)  # HH:MM format for scheduled backups
    last_backup: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    last_backup_status: Mapped[str | None] = mapped_column(String(50), nullable=True)
    last_backup_message: Mapped[str | None] = mapped_column(Text, nullable=True)

    # Timestamps (naive UTC via datetime.utcnow, matching the rest of the models)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
53
+
54
+
55
class BoxSyncLog(Base):
    """Log of Box sync operations.

    One row per sync run; counters are per-run, not cumulative. Note that
    ``connection_id`` references BoxConnection.id but is a plain indexed
    column, not a ForeignKey.
    """
    __tablename__ = "box_sync_logs"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    connection_id: Mapped[str] = mapped_column(String(36), nullable=False, index=True)
    started_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    completed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    status: Mapped[str] = mapped_column(String(50), default="in_progress")  # in_progress, success, error
    files_found: Mapped[int] = mapped_column(Integer, default=0)
    files_processed: Mapped[int] = mapped_column(Integer, default=0)
    files_failed: Mapped[int] = mapped_column(Integer, default=0)
    records_imported: Mapped[int] = mapped_column(Integer, default=0)
    error_message: Mapped[str | None] = mapped_column(Text, nullable=True)
    # Free-form per-run details (e.g. per-file results) as JSON.
    details: Mapped[dict | None] = mapped_column(JSON, nullable=True)
backend/models/group.py ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import uuid
2
+ from datetime import datetime
3
+ from sqlalchemy import String, DateTime, Text
4
+ from sqlalchemy.orm import Mapped, mapped_column, relationship
5
+ from backend.database import Base
6
+
7
+
8
class Group(Base):
    """A named collection of users sharing access to a set of sites.

    Membership lives in UserGroup rows; site access in GroupSite rows.
    Deleting a group cascades to both association tables.
    """
    __tablename__ = "groups"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    name: Mapped[str] = mapped_column(String(255), unique=True, nullable=False)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    # Relationships
    members: Mapped[list["UserGroup"]] = relationship("UserGroup", back_populates="group", cascade="all, delete-orphan")
    sites: Mapped[list["GroupSite"]] = relationship("GroupSite", back_populates="group", cascade="all, delete-orphan")
backend/models/pipeline.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import uuid
2
+ from datetime import datetime
3
+ from sqlalchemy import String, DateTime, Integer, ForeignKey, Text, JSON
4
+ from sqlalchemy.orm import Mapped, mapped_column
5
+ from backend.database import Base
6
+
7
+
8
class FileArchive(Base):
    """Record of an uploaded data file and its processing outcome.

    ``file_hash`` (SHA256) is indexed, presumably to detect duplicate
    uploads — confirm against the import service.
    """
    __tablename__ = "file_archives"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    original_filename: Mapped[str] = mapped_column(String(255), nullable=False)
    file_hash: Mapped[str] = mapped_column(String(64), nullable=False, index=True)  # SHA256
    file_size: Mapped[int | None] = mapped_column(Integer, nullable=True)
    upload_date: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    processed_date: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    status: Mapped[str] = mapped_column(String(50), default="pending")  # pending, processing, completed, error
    error_message: Mapped[str | None] = mapped_column(Text, nullable=True)
    records_imported: Mapped[int] = mapped_column(Integer, default=0)
    # Where the original file was stored after processing, if anywhere.
    archived_path: Mapped[str | None] = mapped_column(String(500), nullable=True)
21
+
22
+
23
class PipelineConfig(Base):
    """Key/value settings store for the data pipeline (string values only)."""
    __tablename__ = "pipeline_config"

    key: Mapped[str] = mapped_column(String(100), primary_key=True)
    value: Mapped[str] = mapped_column(Text, nullable=False)
    description: Mapped[str | None] = mapped_column(Text, nullable=True)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    # User who last changed this setting; nullable for system-made changes.
    updated_by: Mapped[str | None] = mapped_column(String(36), ForeignKey("users.id"), nullable=True)
31
+
32
+
33
class AuditLog(Base):
    """Append-only record of user/system actions for the admin audit trail."""
    __tablename__ = "audit_log"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Nullable so entries survive user deletion and system actions can log.
    user_id: Mapped[str | None] = mapped_column(String(36), ForeignKey("users.id"), nullable=True)
    action: Mapped[str] = mapped_column(String(100), nullable=False)
    resource_type: Mapped[str | None] = mapped_column(String(100), nullable=True)
    resource_id: Mapped[str | None] = mapped_column(String(36), nullable=True)
    details: Mapped[dict | None] = mapped_column(JSON, nullable=True)
    # String(45) fits a full-length IPv6 address.
    ip_address: Mapped[str | None] = mapped_column(String(45), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, index=True)
backend/models/sensor_data.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from sqlalchemy import String, DateTime, Integer, ForeignKey, JSON, Index
3
+ from sqlalchemy.orm import Mapped, mapped_column, relationship
4
+ from backend.database import Base
5
+
6
+
7
class SensorData(Base):
    """One timestamped set of sensor readings for a site.

    All parameter values for a timestamp are stored together in a single
    JSON column rather than one row per parameter.
    """
    __tablename__ = "sensor_data"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    site_id: Mapped[str] = mapped_column(String(36), ForeignKey("sites.id"), nullable=False, index=True)
    timestamp: Mapped[datetime] = mapped_column(DateTime, nullable=False, index=True)
    data: Mapped[dict] = mapped_column(JSON, nullable=False)  # All parameter values as JSON
    record_number: Mapped[int | None] = mapped_column(Integer, nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)

    # Relationships
    site: Mapped["Site"] = relationship("Site", back_populates="sensor_data")

    # Composite index for efficient time-range queries; unique=True also
    # enforces at most one row per (site, timestamp).
    __table_args__ = (
        Index("idx_sensor_data_site_timestamp", "site_id", "timestamp", unique=True),
    )
backend/models/site.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import uuid
2
+ from datetime import datetime
3
+ from sqlalchemy import String, DateTime, Float, Boolean, ForeignKey, Text
4
+ from sqlalchemy.orm import Mapped, mapped_column, relationship
5
+ from backend.database import Base
6
+
7
+
8
class Crop(Base):
    """A crop type; sites, equipment groups and parameters all hang off it."""
    __tablename__ = "crops"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    name: Mapped[str] = mapped_column(String(100), unique=True, nullable=False)  # e.g., "almonds"
    display_name: Mapped[str] = mapped_column(String(100), nullable=False)  # e.g., "Almonds"
    color: Mapped[str] = mapped_column(String(20), default="#3B82F6")  # Hex color for map markers

    # Relationships
    sites: Mapped[list["Site"]] = relationship("Site", back_populates="crop")
    equipment_groups: Mapped[list["EquipmentGroup"]] = relationship("EquipmentGroup", back_populates="crop")
    parameters: Mapped[list["Parameter"]] = relationship("Parameter", back_populates="crop")
20
+
21
+
22
class Site(Base):
    """A physical monitoring location (one sensor station per site)."""
    __tablename__ = "sites"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    site_code: Mapped[str] = mapped_column(String(50), unique=True, nullable=False, index=True)  # e.g., "OLA_001"
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    crop_id: Mapped[str] = mapped_column(String(36), ForeignKey("crops.id"), nullable=False)
    latitude: Mapped[float] = mapped_column(Float, nullable=False)
    longitude: Mapped[float] = mapped_column(Float, nullable=False)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    # Relationships; deleting a site removes its group links and readings.
    crop: Mapped["Crop"] = relationship("Crop", back_populates="sites")
    groups: Mapped[list["GroupSite"]] = relationship("GroupSite", back_populates="site", cascade="all, delete-orphan")
    sensor_data: Mapped[list["SensorData"]] = relationship("SensorData", back_populates="site", cascade="all, delete-orphan")
39
+
40
+
41
class GroupSite(Base):
    """Association table granting a group access to a site (composite PK)."""
    __tablename__ = "group_sites"

    group_id: Mapped[str] = mapped_column(String(36), ForeignKey("groups.id", ondelete="CASCADE"), primary_key=True)
    site_id: Mapped[str] = mapped_column(String(36), ForeignKey("sites.id", ondelete="CASCADE"), primary_key=True)

    # Relationships
    group: Mapped["Group"] = relationship("Group", back_populates="sites")
    site: Mapped["Site"] = relationship("Site", back_populates="groups")
50
+
51
+
52
class EquipmentGroup(Base):
    """A named cluster of parameters (per crop), e.g. one physical sensor."""
    __tablename__ = "equipment_groups"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    name: Mapped[str] = mapped_column(String(255), nullable=False)  # e.g., "Soil Thermocouple"
    # Scoped per crop: the same group name may exist for several crops.
    crop_id: Mapped[str] = mapped_column(String(36), ForeignKey("crops.id"), nullable=False)

    # Relationships
    crop: Mapped["Crop"] = relationship("Crop", back_populates="equipment_groups")
    parameters: Mapped[list["Parameter"]] = relationship("Parameter", back_populates="equipment_group")
62
+
63
+
64
class Parameter(Base):
    """A measured variable (per crop), keyed by the data-file column name."""
    __tablename__ = "parameters"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    name: Mapped[str] = mapped_column(String(100), nullable=False)  # e.g., "TS1_2cm"
    display_name: Mapped[str | None] = mapped_column(String(255), nullable=True)
    unit: Mapped[str | None] = mapped_column(String(50), nullable=True)
    equipment_group_id: Mapped[str | None] = mapped_column(String(36), ForeignKey("equipment_groups.id"), nullable=True)
    crop_id: Mapped[str] = mapped_column(String(36), ForeignKey("crops.id"), nullable=False)
    # Optional expected value range; semantics (validation vs. display
    # bounds) are not visible here — confirm against consumers.
    min_range: Mapped[float | None] = mapped_column(Float, nullable=True)
    max_range: Mapped[float | None] = mapped_column(Float, nullable=True)

    # Relationships
    equipment_group: Mapped["EquipmentGroup | None"] = relationship("EquipmentGroup", back_populates="parameters")
    crop: Mapped["Crop"] = relationship("Crop", back_populates="parameters")
backend/models/user.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import uuid
2
+ from datetime import datetime
3
+ from sqlalchemy import String, Boolean, DateTime, ForeignKey, Text, Integer
4
+ from sqlalchemy.orm import Mapped, mapped_column, relationship
5
+ from backend.database import Base
6
+
7
+
8
class User(Base):
    """An account that can sign in; site access is granted via groups."""
    __tablename__ = "users"

    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid.uuid4()))
    email: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
    password_hash: Mapped[str] = mapped_column(Text, nullable=False)
    full_name: Mapped[str] = mapped_column(String(255), nullable=False)
    is_admin: Mapped[bool] = mapped_column(Boolean, default=False)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)
    # NOTE(review): datetime.utcnow is deprecated in Python 3.12+; a future
    # migration to timezone-aware datetime.now(timezone.utc) should cover
    # all models at once.
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    last_login: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    # Bumping this presumably invalidates previously issued tokens —
    # confirm against the auth service.
    token_version: Mapped[int] = mapped_column(Integer, default=1)

    # Relationships
    groups: Mapped[list["UserGroup"]] = relationship("UserGroup", back_populates="user", cascade="all, delete-orphan")
24
+
25
+
26
class UserGroup(Base):
    """Membership of a user in a group, with a per-group role (composite PK)."""
    __tablename__ = "user_groups"

    user_id: Mapped[str] = mapped_column(String(36), ForeignKey("users.id", ondelete="CASCADE"), primary_key=True)
    group_id: Mapped[str] = mapped_column(String(36), ForeignKey("groups.id", ondelete="CASCADE"), primary_key=True)
    role: Mapped[str] = mapped_column(String(50), default="viewer")  # viewer, editor, manager
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)

    # Relationships
    user: Mapped["User"] = relationship("User", back_populates="groups")
    group: Mapped["Group"] = relationship("Group", back_populates="members")
backend/requirements.txt ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ fastapi==0.109.0
2
+ uvicorn[standard]==0.27.0
3
+ sqlalchemy==2.0.25
4
+ pydantic[email]==2.5.3
5
+ pydantic-settings==2.1.0
6
+ python-jose[cryptography]==3.3.0
7
+ passlib==1.7.4
8
+ bcrypt==4.0.1
9
+ python-multipart==0.0.6
10
+ pandas==2.2.0
11
+ numpy==1.26.3
12
+ aiofiles==23.2.1
13
+ boxsdk==3.9.2
14
+ apscheduler==3.10.4
15
+ sqlcipher3==0.5.3
backend/schemas/__init__.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from backend.schemas.user import UserCreate, UserUpdate, UserResponse, UserInDB
2
+ from backend.schemas.auth import Token, TokenPayload, LoginRequest
3
+ from backend.schemas.site import SiteResponse, CropResponse, ParameterResponse
4
+ from backend.schemas.sensor import SensorDataQuery, SensorDataResponse
5
+
6
+ __all__ = [
7
+ "UserCreate", "UserUpdate", "UserResponse", "UserInDB",
8
+ "Token", "TokenPayload", "LoginRequest",
9
+ "SiteResponse", "CropResponse", "ParameterResponse",
10
+ "SensorDataQuery", "SensorDataResponse"
11
+ ]
backend/schemas/admin.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from pydantic import BaseModel
3
+
4
+
5
class AuditLogResponse(BaseModel):
    """Serialized audit-log entry returned by admin endpoints."""
    id: int
    user_id: str | None
    # Defaults to None — presumably filled in from the related User rather
    # than stored on the log row; confirm against the AuditLog model.
    user_email: str | None = None
    action: str
    resource_type: str | None
    resource_id: str | None
    details: dict | None
    ip_address: str | None
    created_at: datetime

    class Config:
        # Allow construction directly from ORM objects (pydantic v2).
        from_attributes = True
18
+
19
+
20
class SystemStatsResponse(BaseModel):
    """Aggregate counters for the admin dashboard."""
    total_users: int
    active_users: int
    total_groups: int
    total_sites: int
    active_sites: int
    total_sensor_records: int
    total_parameters: int
    total_equipment_groups: int
29
+
30
+
31
class SiteCreate(BaseModel):
    """Payload for creating a site (admin API); all fields required."""
    site_code: str
    name: str
    crop_id: str
    latitude: float
    longitude: float
37
+
38
+
39
class SiteUpdate(BaseModel):
    """Partial-update payload for a site; omitted fields stay unchanged."""
    site_code: str | None = None
    name: str | None = None
    crop_id: str | None = None
    latitude: float | None = None
    longitude: float | None = None
    is_active: bool | None = None
backend/schemas/auth.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pydantic import BaseModel, EmailStr
2
+
3
+
4
class LoginRequest(BaseModel):
    """Credentials submitted to the login endpoint."""
    email: EmailStr
    password: str
7
+
8
+
9
class Token(BaseModel):
    """Access/refresh token pair returned after authentication."""
    access_token: str
    refresh_token: str
    token_type: str = "bearer"
13
+
14
+
15
class RefreshRequest(BaseModel):
    """Body for the token-refresh endpoint."""
    refresh_token: str
17
+
18
+
19
class TokenPayload(BaseModel):
    """Decoded JWT claims; mirrors what create_token_for_user embeds."""
    sub: str  # user_id
    email: str
    is_admin: bool
    groups: list[str]  # group_ids
    sites: list[str]  # site_codes the user can access
    exp: int  # expiry as a Unix timestamp
backend/schemas/group.py ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from pydantic import BaseModel
3
+
4
+
5
class GroupBase(BaseModel):
    """Fields shared by group create/read schemas."""
    name: str
    description: str | None = None
8
+
9
+
10
class GroupCreate(GroupBase):
    """Payload for creating a group; identical to GroupBase."""
    pass
12
+
13
+
14
class GroupUpdate(BaseModel):
    """Partial-update payload for a group; omitted fields stay unchanged."""
    name: str | None = None
    description: str | None = None
17
+
18
+
19
class GroupMemberResponse(BaseModel):
    """One member of a group, flattened from User + UserGroup."""
    user_id: str
    email: str
    full_name: str
    role: str  # viewer, editor, or manager (see UserGroup.role)

    class Config:
        from_attributes = True
27
+
28
+
29
class GroupSiteResponse(BaseModel):
    """One site assigned to a group."""
    site_id: str
    site_code: str
    site_name: str

    class Config:
        from_attributes = True
36
+
37
+
38
class GroupResponse(GroupBase):
    """Group summary for list views; counts default to 0 when not computed."""
    id: str
    created_at: datetime
    updated_at: datetime
    member_count: int = 0
    site_count: int = 0

    class Config:
        from_attributes = True
47
+
48
+
49
class GroupDetailResponse(GroupResponse):
    """Group summary plus full member and site listings."""
    members: list[GroupMemberResponse] = []
    sites: list[GroupSiteResponse] = []
52
+
53
+
54
class AssignUserToGroupRequest(BaseModel):
    """Body for adding a user to a group with an optional role."""
    user_id: str
    role: str = "viewer"  # viewer, editor, manager
57
+
58
+
59
class AssignSiteToGroupRequest(BaseModel):
    """Body for assigning a site to a group."""
    site_id: str
backend/schemas/pipeline.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Pipeline schemas for file upload and processing."""
2
+ from pydantic import BaseModel
3
+ from datetime import datetime
4
+ from typing import Optional
5
+
6
+
7
class FileInfo(BaseModel):
    """Information about a file in staging or processed folder."""
    filename: str
    size: int  # bytes
    modified: datetime
    status: str  # staging, processing, processed, error
13
+
14
+
15
class FileUploadResponse(BaseModel):
    """Response after uploading a file."""
    filename: str
    size: int  # bytes
    message: str
20
+
21
+
22
class ProcessingResult(BaseModel):
    """Result of processing a single file."""
    filename: str
    status: str  # success, error
    records_imported: int
    records_skipped: int
    records_duplicate: int
    error_message: Optional[str] = None  # populated only when status == "error"
    processing_time: float  # seconds
31
+
32
+
33
class PipelineStatus(BaseModel):
    """Overall pipeline status."""
    staging_files: list[FileInfo]
    processed_files: list[FileInfo]
    is_processing: bool
    last_run: Optional[datetime] = None  # None until the pipeline has run once
39
+
40
+
41
class ProcessRequest(BaseModel):
    """Request to process specific files."""
    filenames: Optional[list[str]] = None  # None = process all staging files
44
+
45
+
46
class DatabaseExportResponse(BaseModel):
    """Response for database export."""
    filename: str
    size: int  # bytes
    tables: dict[str, int]  # table name -> row count
    created_at: datetime
52
+
53
+
54
class DatabaseImportRequest(BaseModel):
    """Request for database import confirmation."""
    confirm: bool = False  # must be explicitly set to True to proceed
backend/schemas/sensor.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from pydantic import BaseModel
3
+
4
+
5
class SensorDataQuery(BaseModel):
    """Query for sensor readings; omitted start/end means an open-ended range."""
    sites: list[str]  # site_codes
    parameters: list[str]  # parameter names
    start: datetime | None = None
    end: datetime | None = None
10
+
11
+
12
class SensorDataPoint(BaseModel):
    """One timestamped reading per site; missing parameters map to None."""
    timestamp: datetime
    site_code: str
    values: dict[str, float | None]  # parameter_name -> value
16
+
17
+
18
class SensorDataResponse(BaseModel):
    """Sensor-data query result: points plus the echoed query dimensions."""
    data: list[SensorDataPoint]
    sites: list[str]
    parameters: list[str]
    count: int  # number of entries in `data`
backend/schemas/site.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from pydantic import BaseModel
3
+
4
+
5
class CropResponse(BaseModel):
    """Crop type as exposed by the sites API."""
    id: str
    name: str
    display_name: str
    color: str  # presumably a UI color code; confirm format against the model

    class Config:
        from_attributes = True
13
+
14
+
15
class SiteResponse(BaseModel):
    """Site with location and crop info; crop_name is joined in separately."""
    id: str
    site_code: str
    name: str
    crop_id: str
    crop_name: str | None = None  # filled from the related Crop when available
    latitude: float
    longitude: float
    is_active: bool

    class Config:
        from_attributes = True
27
+
28
+
29
class EquipmentGroupResponse(BaseModel):
    """Equipment group tied to a crop."""
    id: str
    name: str
    crop_id: str

    class Config:
        from_attributes = True
36
+
37
+
38
class ParameterResponse(BaseModel):
    """Sensor parameter with optional display metadata and valid range."""
    id: str
    name: str
    display_name: str | None
    unit: str | None
    equipment_group_id: str | None
    equipment_group_name: str | None = None  # joined from the equipment group
    min_range: float | None
    max_range: float | None

    class Config:
        from_attributes = True
backend/schemas/user.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from pydantic import BaseModel, EmailStr
3
+
4
+
5
class UserBase(BaseModel):
    """Fields shared by user create/read schemas."""
    email: EmailStr
    full_name: str
    is_admin: bool = False
9
+
10
+
11
class UserCreate(UserBase):
    """User-creation payload; password is plaintext here and hashed server-side."""
    password: str
13
+
14
+
15
class UserUpdate(BaseModel):
    """Partial-update payload for a user; omitted fields stay unchanged."""
    email: EmailStr | None = None
    full_name: str | None = None
    is_admin: bool | None = None
    is_active: bool | None = None
    password: str | None = None  # when set, replaces the stored hash
21
+
22
+
23
class UserResponse(UserBase):
    """Public user representation; never includes the password hash."""
    id: str
    is_active: bool
    created_at: datetime
    last_login: datetime | None = None
    groups: list[str] = []  # List of group names

    class Config:
        from_attributes = True
32
+
33
+
34
class UserInDB(UserBase):
    """Internal user representation including the stored password hash."""
    id: str
    password_hash: str
    is_active: bool
    created_at: datetime

    class Config:
        from_attributes = True
backend/services/__init__.py ADDED
File without changes
backend/services/auth.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+ from sqlalchemy.orm import Session
3
+ from backend.models import User, UserGroup, GroupSite
4
+ from backend.core.security import verify_password, get_password_hash, create_access_token, create_refresh_token
5
+ from backend.schemas.auth import Token
6
+
7
+
8
def authenticate_user(db: Session, email: str, password: str) -> User | None:
    """Return the user matching *email* whose password verifies, else None.

    NOTE(review): does not check ``is_active`` — presumably enforced by the
    caller; confirm.
    """
    candidate = db.query(User).filter(User.email == email).first()
    if candidate is not None and verify_password(password, candidate.password_hash):
        return candidate
    return None
15
+
16
+
17
def create_token_for_user(db: Session, user: User) -> Token:
    """Issue an access/refresh token pair for *user* and record the login.

    The access token embeds the user's group ids and the site codes the
    user may access: all active sites for admins, otherwise the active
    sites reachable through the user's groups. The refresh token carries
    only the user id and token_version so it stays valid until the
    version is bumped.

    Side effects: updates ``user.last_login`` and commits the session.
    """
    # Local import to avoid a circular dependency at module import time.
    from backend.models import Site

    # Get user's groups
    user_groups = db.query(UserGroup).filter(UserGroup.user_id == user.id).all()
    group_ids = [ug.group_id for ug in user_groups]

    # Resolve the active sites visible to this user. (Previously this
    # duplicated the import and the site_codes extraction in both branches.)
    if user.is_admin:
        # Admin can see all active sites.
        site_query = db.query(Site).filter(Site.is_active == True)
    else:
        # Regular user can only see sites from their groups.
        group_sites = db.query(GroupSite).filter(GroupSite.group_id.in_(group_ids)).all()
        site_ids = [gs.site_id for gs in group_sites]
        site_query = db.query(Site).filter(Site.id.in_(site_ids), Site.is_active == True)
    site_codes = [s.site_code for s in site_query.all()]

    # Common token data
    token_data = {
        "sub": user.id,
        "email": user.email,
        "is_admin": user.is_admin,
        "groups": group_ids,
        "sites": site_codes,
        "token_version": user.token_version,
    }

    access_token = create_access_token(data=token_data)

    # Refresh token only needs user ID and token_version for validation.
    refresh_data = {
        "sub": user.id,
        "token_version": user.token_version,
    }
    refresh_token = create_refresh_token(data=refresh_data)

    # Update last login
    user.last_login = datetime.utcnow()
    db.commit()

    return Token(access_token=access_token, refresh_token=refresh_token)
61
+
62
+
63
def create_user(
    db: Session,
    email: str,
    password: str,
    full_name: str,
    is_admin: bool = False
) -> User:
    """Create, persist, and return a new user with a hashed password."""
    new_user = User(
        email=email,
        password_hash=get_password_hash(password),
        full_name=full_name,
        is_admin=is_admin,
    )
    db.add(new_user)
    db.commit()
    # Reload to pick up DB-generated defaults (id, timestamps).
    db.refresh(new_user)
    return new_user
81
+
82
+
83
def get_user_by_email(db: Session, email: str) -> User | None:
    """Look up a user by their unique email address, or None if absent."""
    return db.query(User).filter_by(email=email).first()
backend/services/box_integration.py ADDED
@@ -0,0 +1,312 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Box integration service for OAuth and file operations.
3
+ Handles authentication, folder browsing, file downloads, and file moves.
4
+ """
5
+ import os
6
+ import shutil
7
+ import secrets
8
+ from datetime import datetime, timedelta
9
+ from pathlib import Path
10
+ from typing import Optional
11
+ from urllib.parse import urlencode
12
+
13
+ from boxsdk import OAuth2, Client
14
+ from boxsdk.exception import BoxAPIException
15
+ from sqlalchemy.orm import Session
16
+
17
+ from backend.config import settings
18
+ from backend.models import BoxConnection, BoxSyncLog, AuditLog
19
+
20
+
21
# OAuth `state` tokens issued by get_oauth_url, mapped to issue time.
# Used for CSRF protection; stale entries are purged lazily inside
# get_oauth_url. Per-process and lost on restart — in production, use
# Redis or database.
_oauth_states: dict[str, datetime] = {}
23
+
24
+
25
class BoxService:
    """Service for Box API operations.

    Wraps OAuth flow, token refresh, folder/file operations, and the
    BoxConnection / BoxSyncLog persistence. The app supports exactly one
    Box connection row at a time.
    """

    def __init__(self, db: Session):
        self.db = db

    def get_connection(self) -> Optional[BoxConnection]:
        """Get the current Box connection (only one allowed)."""
        return self.db.query(BoxConnection).first()

    def get_oauth_url(self) -> dict:
        """Generate Box OAuth authorization URL.

        Returns a dict with ``auth_url`` and the CSRF ``state`` token.
        Raises ValueError if client credentials are not configured.
        """
        if not settings.box_client_id or not settings.box_client_secret:
            raise ValueError("Box OAuth credentials not configured. Set BOX_CLIENT_ID and BOX_CLIENT_SECRET in .env")

        # Generate state token for CSRF protection
        state = secrets.token_urlsafe(32)
        _oauth_states[state] = datetime.utcnow()

        # Clean old states (older than 10 minutes). NOTE(review): expiry is
        # only enforced here, not in exchange_code, so a stale state is
        # accepted until the next get_oauth_url call purges it.
        cutoff = datetime.utcnow() - timedelta(minutes=10)
        for s, t in list(_oauth_states.items()):
            if t < cutoff:
                del _oauth_states[s]

        # Build authorization URL
        params = {
            'client_id': settings.box_client_id,
            'redirect_uri': settings.box_redirect_uri,
            'response_type': 'code',
            'state': state,
        }
        auth_url = f"https://account.box.com/api/oauth2/authorize?{urlencode(params)}"

        return {
            'auth_url': auth_url,
            'state': state
        }

    def exchange_code(self, code: str, state: str) -> BoxConnection:
        """Exchange authorization code for tokens.

        Verifies the CSRF state, fetches tokens plus the Box user's
        identity, and upserts them onto the single BoxConnection row.
        """
        # Verify state (presence only; see note in get_oauth_url)
        if state not in _oauth_states:
            raise ValueError("Invalid or expired state token")
        del _oauth_states[state]

        # Create OAuth2 object and authenticate
        oauth = OAuth2(
            client_id=settings.box_client_id,
            client_secret=settings.box_client_secret,
        )

        # Exchange code for tokens
        access_token, refresh_token = oauth.authenticate(code)

        # Create Box client to get user info
        client = Client(oauth)
        user = client.user().get()

        # Get or create connection
        connection = self.get_connection()
        if not connection:
            connection = BoxConnection()
            self.db.add(connection)

        # Update connection with tokens and user info.
        # NOTE(review): expiry is assumed to be 1 hour rather than read
        # from the token response — confirm against Box's token TTL.
        connection.access_token = access_token
        connection.refresh_token = refresh_token
        connection.token_expires_at = datetime.utcnow() + timedelta(hours=1)
        connection.box_user_id = user.id
        connection.box_user_name = user.name
        connection.box_user_email = user.login
        connection.updated_at = datetime.utcnow()

        self.db.commit()
        self.db.refresh(connection)

        return connection

    def get_client(self, connection: Optional[BoxConnection] = None) -> Optional[Client]:
        """Get an authenticated Box client, refreshing tokens if needed.

        Returns None when no connection or access token exists. Refreshed
        tokens are persisted via the store_tokens callback.
        """
        if connection is None:
            connection = self.get_connection()

        if not connection or not connection.access_token:
            return None

        def store_tokens(access_token, refresh_token):
            """Callback to store refreshed tokens."""
            connection.access_token = access_token
            connection.refresh_token = refresh_token
            connection.token_expires_at = datetime.utcnow() + timedelta(hours=1)
            connection.updated_at = datetime.utcnow()
            self.db.commit()

        oauth = OAuth2(
            client_id=settings.box_client_id,
            client_secret=settings.box_client_secret,
            access_token=connection.access_token,
            refresh_token=connection.refresh_token,
            store_tokens=store_tokens
        )

        return Client(oauth)

    def list_folders(self, folder_id: str = '0', connection: Optional[BoxConnection] = None) -> list[dict]:
        """List folders in a Box folder ('0' is the Box root).

        NOTE(review): ``limit=100`` is passed to the SDK — confirm whether
        the returned collection auto-paginates past the first page.
        """
        client = self.get_client(connection)
        if not client:
            raise ValueError("No Box connection available")

        try:
            folder = client.folder(folder_id).get()
            items = folder.get_items(limit=100, offset=0)

            folders = []
            for item in items:
                if item.type == 'folder':
                    folders.append({
                        'id': item.id,
                        'name': item.name,
                        'type': 'folder'
                    })

            return folders
        except BoxAPIException as e:
            raise ValueError(f"Box API error: {e.message}")

    def list_files(self, folder_id: str, connection: Optional[BoxConnection] = None) -> list[dict]:
        """List CSV files (by .csv extension, case-insensitive) in a Box folder."""
        client = self.get_client(connection)
        if not client:
            raise ValueError("No Box connection available")

        try:
            folder = client.folder(folder_id).get()
            # Request specific fields - mini items from get_items() don't have size/modified_at
            items = folder.get_items(limit=1000, offset=0, fields=['id', 'name', 'type', 'size', 'modified_at'])

            files = []
            for item in items:
                if item.type == 'file' and item.name.lower().endswith('.csv'):
                    files.append({
                        'id': item.id,
                        'name': item.name,
                        'size': getattr(item, 'size', None),
                        'modified_at': getattr(item, 'modified_at', None)
                    })

            return files
        except BoxAPIException as e:
            raise ValueError(f"Box API error: {e.message}")

    def download_file(self, file_id: str, local_path: Path, connection: Optional[BoxConnection] = None) -> Path:
        """Download a file from Box to local storage and return the path."""
        client = self.get_client(connection)
        if not client:
            raise ValueError("No Box connection available")

        try:
            box_file = client.file(file_id).get()
            with open(local_path, 'wb') as f:
                box_file.download_to(f)
            return local_path
        except BoxAPIException as e:
            raise ValueError(f"Box API error: {e.message}")

    def move_file(self, file_id: str, dest_folder_id: str, connection: Optional[BoxConnection] = None) -> dict:
        """Move a file to a different folder in Box."""
        client = self.get_client(connection)
        if not client:
            raise ValueError("No Box connection available")

        try:
            box_file = client.file(file_id)
            moved_file = box_file.move(parent_folder=client.folder(dest_folder_id))
            return {
                'id': moved_file.id,
                'name': moved_file.name,
                'parent_id': moved_file.parent.id
            }
        except BoxAPIException as e:
            raise ValueError(f"Box API error: {e.message}")

    def upload_file(
        self,
        file_path: Path,
        folder_id: str,
        filename: Optional[str] = None,
        connection: Optional[BoxConnection] = None
    ) -> dict:
        """Upload a file to a Box folder, optionally under a new filename."""
        client = self.get_client(connection)
        if not client:
            raise ValueError("No Box connection available")

        try:
            folder = client.folder(folder_id)
            upload_filename = filename or file_path.name

            with open(file_path, 'rb') as f:
                uploaded_file = folder.upload_stream(f, upload_filename)

            return {
                'id': uploaded_file.id,
                'name': uploaded_file.name,
                'size': uploaded_file.size if hasattr(uploaded_file, 'size') else None
            }
        except BoxAPIException as e:
            raise ValueError(f"Box API error: {e.message}")

    def update_folder_config(
        self,
        staging_folder_id: str,
        staging_folder_name: str,
        processed_folder_id: str,
        processed_folder_name: str,
        sync_interval_minutes: int = 60,
        connection: Optional[BoxConnection] = None
    ) -> BoxConnection:
        """Update folder configuration for the Box connection.

        Also marks the connection active and persists the change.
        """
        if connection is None:
            connection = self.get_connection()

        if not connection:
            raise ValueError("No Box connection available")

        connection.staging_folder_id = staging_folder_id
        connection.staging_folder_name = staging_folder_name
        connection.processed_folder_id = processed_folder_id
        connection.processed_folder_name = processed_folder_name
        connection.sync_interval_minutes = sync_interval_minutes
        connection.is_active = True
        connection.updated_at = datetime.utcnow()

        self.db.commit()
        self.db.refresh(connection)

        return connection

    def disconnect(self) -> bool:
        """Remove Box connection and clear tokens.

        Returns True if a connection existed and was deleted.
        """
        connection = self.get_connection()
        if connection:
            self.db.delete(connection)
            self.db.commit()
            return True
        return False

    def create_sync_log(self, connection_id: str) -> BoxSyncLog:
        """Create a new sync log entry."""
        log = BoxSyncLog(connection_id=connection_id)
        self.db.add(log)
        self.db.commit()
        self.db.refresh(log)
        return log

    def update_sync_log(
        self,
        log: BoxSyncLog,
        status: str,
        files_found: int = 0,
        files_processed: int = 0,
        files_failed: int = 0,
        records_imported: int = 0,
        error_message: Optional[str] = None,
        details: Optional[dict] = None
    ):
        """Update a sync log entry.

        NOTE(review): completed_at is only stamped for 'success'/'error';
        a 'partial' status leaves it unset — confirm this is intended.
        """
        log.status = status
        log.files_found = files_found
        log.files_processed = files_processed
        log.files_failed = files_failed
        log.records_imported = records_imported
        log.error_message = error_message
        log.details = details
        if status in ['success', 'error']:
            log.completed_at = datetime.utcnow()
        self.db.commit()

    def get_sync_logs(self, limit: int = 20) -> list[BoxSyncLog]:
        """Get recent sync logs, newest first."""
        return self.db.query(BoxSyncLog).order_by(BoxSyncLog.started_at.desc()).limit(limit).all()
308
+
309
+
310
def is_box_configured() -> bool:
    """Return True when both Box OAuth client credentials are present."""
    has_id = bool(settings.box_client_id)
    has_secret = bool(settings.box_client_secret)
    return has_id and has_secret
backend/services/box_worker.py ADDED
@@ -0,0 +1,404 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Box background worker for periodic file sync.
3
+ Downloads files from Box staging folder, processes them, and moves to processed folder.
4
+ """
5
+ import time
6
+ from datetime import datetime
7
+ from pathlib import Path
8
+ from typing import Optional
9
+ import threading
10
+
11
+ from apscheduler.schedulers.background import BackgroundScheduler
12
+ from apscheduler.triggers.interval import IntervalTrigger
13
+ from sqlalchemy.orm import Session
14
+
15
+ from backend.database import SessionLocal
16
+ from backend.models import BoxConnection, BoxSyncLog, AuditLog
17
+ from backend.services.box_integration import BoxService
18
+ from backend.services import pipeline_worker
19
+ from backend.config import settings
20
+
21
+
22
# Global scheduler instance and process-local sync state.
_scheduler: Optional[BackgroundScheduler] = None  # lazily created by get_scheduler()
_is_syncing = False  # True while run_box_sync is executing
_sync_lock = threading.Lock()  # guards reads/writes of _is_syncing
26
+
27
+
28
def get_scheduler() -> BackgroundScheduler:
    """Return the singleton background scheduler, creating it lazily."""
    global _scheduler
    if _scheduler is not None:
        return _scheduler
    _scheduler = BackgroundScheduler()
    return _scheduler
34
+
35
+
36
def start_scheduler():
    """Start the background scheduler unless it is already running."""
    sched = get_scheduler()
    if sched.running:
        return
    sched.start()
    print("Box sync scheduler started")
42
+
43
+
44
def stop_scheduler():
    """Shut down and discard the background scheduler, if one is running."""
    global _scheduler
    if _scheduler is None or not _scheduler.running:
        return
    _scheduler.shutdown(wait=False)
    _scheduler = None
    print("Box sync scheduler stopped")
51
+
52
+
53
def update_sync_schedule(interval_minutes: int = 60):
    """(Re)register the periodic Box sync job at the given interval.

    Replaces any existing 'box_sync' job and ensures the scheduler runs.
    """
    sched = get_scheduler()

    # Drop a previously registered sync job before re-adding it.
    if sched.get_job('box_sync'):
        sched.remove_job('box_sync')

    sched.add_job(
        run_box_sync,
        trigger=IntervalTrigger(minutes=interval_minutes),
        id='box_sync',
        name='Box File Sync',
        replace_existing=True,
        max_instances=1,
    )

    if not sched.running:
        start_scheduler()

    print(f"Box sync scheduled every {interval_minutes} minutes")
75
+
76
+
77
def run_box_sync(force: bool = False) -> dict:
    """
    Run Box sync operation.
    Downloads files from Box staging, processes them, and moves to processed folder.

    Args:
        force: If True, run even if another sync is in progress

    Returns:
        dict with sync results: status ('success'/'partial'/'error'/'skipped'),
        files_found/processed/failed, records_imported, and an errors list.
    """
    global _is_syncing

    # Check if already syncing (guarded so concurrent triggers don't race)
    with _sync_lock:
        if _is_syncing and not force:
            return {'status': 'skipped', 'message': 'Sync already in progress'}
        _is_syncing = True

    result = {
        'status': 'success',
        'files_found': 0,
        'files_processed': 0,
        'files_failed': 0,
        'records_imported': 0,
        'errors': []
    }

    db = SessionLocal()
    try:
        box_service = BoxService(db)
        connection = box_service.get_connection()

        # Nothing to do without an active, fully configured connection.
        if not connection or not connection.is_active:
            result['status'] = 'skipped'
            result['message'] = 'No active Box connection'
            return result

        if not connection.staging_folder_id or not connection.processed_folder_id:
            result['status'] = 'error'
            result['message'] = 'Box folders not configured'
            return result

        # Create sync log
        sync_log = box_service.create_sync_log(connection.id)

        try:
            # List files in staging folder
            files = box_service.list_files(connection.staging_folder_id, connection)
            result['files_found'] = len(files)

            if not files:
                box_service.update_sync_log(
                    sync_log,
                    status='success',
                    files_found=0,
                    details={'message': 'No files to process'}
                )
                connection.last_sync = datetime.utcnow()
                connection.last_sync_status = 'success'
                connection.last_sync_message = 'No new files'
                db.commit()
                return result

            # Process each file: download locally, run through the pipeline,
            # then move the Box original into the processed folder.
            staging_dir = Path(settings.uploads_dir) / "staging"
            staging_dir.mkdir(parents=True, exist_ok=True)

            for file_info in files:
                file_id = file_info['id']
                file_name = file_info['name']

                try:
                    # Download to local staging (timestamp prefix avoids clashes)
                    local_path = staging_dir / f"box_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{file_name}"
                    box_service.download_file(file_id, local_path, connection)

                    # Process the file using existing pipeline
                    process_result = pipeline_worker.process_single_file(db, local_path, user_id=None)

                    if process_result['status'] == 'success':
                        result['files_processed'] += 1
                        result['records_imported'] += process_result['records_imported']

                        # Move file to processed folder in Box; a failed move is
                        # recorded but does not undo the successful import.
                        try:
                            box_service.move_file(file_id, connection.processed_folder_id, connection)
                        except Exception as move_err:
                            result['errors'].append(f"Failed to move {file_name} in Box: {str(move_err)}")
                    else:
                        result['files_failed'] += 1
                        result['errors'].append(f"{file_name}: {process_result.get('error_message', 'Unknown error')}")

                except Exception as file_err:
                    # Per-file failures don't abort the batch.
                    result['files_failed'] += 1
                    result['errors'].append(f"{file_name}: {str(file_err)}")

            # Update sync log: 'partial' when some files failed, 'error' when
            # every file failed.
            final_status = 'success' if result['files_failed'] == 0 else 'partial'
            if result['files_processed'] == 0 and result['files_failed'] > 0:
                final_status = 'error'

            box_service.update_sync_log(
                sync_log,
                status=final_status,
                files_found=result['files_found'],
                files_processed=result['files_processed'],
                files_failed=result['files_failed'],
                records_imported=result['records_imported'],
                error_message='; '.join(result['errors'][:5]) if result['errors'] else None,
                details={'errors': result['errors']}
            )

            # Update connection status
            connection.last_sync = datetime.utcnow()
            connection.last_sync_status = final_status
            connection.last_sync_message = f"Processed {result['files_processed']}/{result['files_found']} files"
            connection.files_processed_count += result['files_processed']
            db.commit()

            result['status'] = final_status

        except Exception as e:
            # Batch-level failure: record it on both the sync log and the
            # connection row.
            box_service.update_sync_log(
                sync_log,
                status='error',
                error_message=str(e)
            )
            connection.last_sync = datetime.utcnow()
            connection.last_sync_status = 'error'
            connection.last_sync_message = str(e)
            db.commit()

            result['status'] = 'error'
            result['errors'].append(str(e))

    finally:
        # Always release the session and the in-progress flag.
        db.close()
        with _sync_lock:
            _is_syncing = False

    return result
219
+
220
+
221
def get_sync_status() -> dict:
    """Report connection, sync, and scheduler state for the Box integration."""
    db = SessionLocal()
    try:
        connection = BoxService(db).get_connection()
        scheduler_running = _scheduler.running if _scheduler else False

        if connection is None:
            return {
                'is_connected': False,
                'is_syncing': _is_syncing,
                'scheduler_running': scheduler_running,
            }

        last_sync_iso = connection.last_sync.isoformat() if connection.last_sync else None
        return {
            'is_connected': True,
            'is_active': connection.is_active,
            'is_syncing': _is_syncing,
            'scheduler_running': scheduler_running,
            'last_sync': last_sync_iso,
            'last_sync_status': connection.last_sync_status,
            'last_sync_message': connection.last_sync_message,
            'sync_interval_minutes': connection.sync_interval_minutes,
            'files_processed_total': connection.files_processed_count,
        }
    finally:
        db.close()
248
+
249
+
250
def initialize_box_scheduler():
    """Wire up sync and backup schedules from the persisted connection settings."""
    db = SessionLocal()
    try:
        connection = BoxService(db).get_connection()
        if connection is None:
            return

        # Periodic file sync, only for an active and configured connection.
        if connection.is_active and connection.staging_folder_id:
            update_sync_schedule(connection.sync_interval_minutes)
            print(f"Box scheduler initialized with {connection.sync_interval_minutes} minute interval")

        # Scheduled database backups, unless set to manual.
        if connection.backup_enabled and connection.backup_schedule != "manual":
            update_backup_schedule(connection.backup_schedule, connection.backup_time)
            print(f"Backup scheduler initialized: {connection.backup_schedule}")
    finally:
        db.close()
267
+
268
+
269
# ============== Database Backup Functions ==============

# Lazily created, process-wide scheduler dedicated to database backups.
_backup_scheduler: Optional[BackgroundScheduler] = None


def get_backup_scheduler() -> BackgroundScheduler:
    """Return the singleton backup scheduler, creating it on first use."""
    global _backup_scheduler
    if _backup_scheduler is None:
        _backup_scheduler = BackgroundScheduler()
    return _backup_scheduler
280
+
281
+
282
def update_backup_schedule(schedule: Optional[str], backup_time: Optional[str]):
    """Create, replace, or disable the scheduled database backup job.

    Args:
        schedule: "daily", "weekly", "manual", or None. "manual"/None removes
            any existing job; unknown values are logged and ignored.
        backup_time: "HH:MM" string; falls back to 02:00 when missing or
            malformed.
    """
    scheduler = get_backup_scheduler()

    # Remove existing backup job if any (it is re-added below if applicable)
    if scheduler.get_job('database_backup'):
        scheduler.remove_job('database_backup')

    if schedule is None or schedule == "manual":
        print("Backup scheduler disabled")
        return

    # Parse backup time (default to 02:00 if not specified)
    hour = 2
    minute = 0
    if backup_time:
        try:
            parts = backup_time.split(':')
            hour = int(parts[0])
            minute = int(parts[1]) if len(parts) > 1 else 0
        except (ValueError, IndexError):
            pass  # keep the 02:00 default on malformed input

    # Single import instead of duplicating it in each schedule branch.
    from apscheduler.triggers.cron import CronTrigger

    # Configure trigger based on schedule
    if schedule == "daily":
        trigger = CronTrigger(hour=hour, minute=minute)
        print(f"Backup scheduled daily at {hour:02d}:{minute:02d}")
    elif schedule == "weekly":
        # Weekly on Sunday
        trigger = CronTrigger(day_of_week='sun', hour=hour, minute=minute)
        print(f"Backup scheduled weekly on Sunday at {hour:02d}:{minute:02d}")
    else:
        print(f"Unknown backup schedule: {schedule}")
        return

    scheduler.add_job(
        run_database_backup,
        trigger=trigger,
        id='database_backup',
        name='Database Backup to Box',
        replace_existing=True,
        max_instances=1
    )

    if not scheduler.running:
        scheduler.start()
330
+
331
+
332
def run_database_backup(triggered_by: str = "scheduler") -> dict:
    """
    Run database backup to Box.

    Copies the encrypted SQLite database to the configured Box backup folder.

    Args:
        triggered_by: Label recorded in the result ("scheduler" or a manual
            trigger source) for observability.

    Returns:
        Dict with keys: status ('success'/'error'), message, filename
        (the uploaded name, or None on failure), triggered_by.
    """
    result = {
        'status': 'success',
        'message': '',
        'filename': None,
        'triggered_by': triggered_by
    }

    db = SessionLocal()
    try:
        box_service = BoxService(db)
        connection = box_service.get_connection()

        # Preconditions: a connection record and a configured backup folder.
        if not connection:
            result['status'] = 'error'
            result['message'] = 'No Box connection available'
            return result

        if not connection.backup_folder_id:
            result['status'] = 'error'
            result['message'] = 'Backup folder not configured'
            return result

        # Get database file path
        # NOTE(review): assumes a sqlite:/// URL — other schemes would yield
        # a nonexistent path and fail the next check. Confirm deployments
        # only use SQLite here.
        db_path = Path(settings.database_url.replace("sqlite:///", ""))

        if not db_path.exists():
            result['status'] = 'error'
            result['message'] = 'Database file not found'
            return result

        # Generate backup filename with timestamp
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        backup_filename = f"csg_dashboard_backup_{timestamp}.db"

        try:
            # Upload to Box
            box_service.upload_file(
                file_path=db_path,
                folder_id=connection.backup_folder_id,
                filename=backup_filename,
                connection=connection
            )

            result['status'] = 'success'
            result['message'] = f'Backup uploaded successfully: {backup_filename}'
            result['filename'] = backup_filename

            # Update connection status (persisted so the UI can show it)
            connection.last_backup = datetime.utcnow()
            connection.last_backup_status = 'success'
            connection.last_backup_message = f'Uploaded {backup_filename}'
            db.commit()

        except Exception as e:
            # Upload failures are recorded on the connection rather than
            # re-raised, so scheduled runs fail soft.
            result['status'] = 'error'
            result['message'] = f'Upload failed: {str(e)}'

            # Update connection status
            connection.last_backup = datetime.utcnow()
            connection.last_backup_status = 'error'
            connection.last_backup_message = str(e)
            db.commit()

    finally:
        db.close()

    return result
backend/services/data_import.py ADDED
@@ -0,0 +1,271 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Data import service for loading sample sensor data.
3
+ """
4
+ import pandas as pd
5
+ from pathlib import Path
6
+ from datetime import datetime
7
+ import json
8
+ from sqlalchemy.orm import Session
9
+ from backend.models import Site, SensorData, Crop, EquipmentGroup, Parameter
10
+
11
+
12
def get_site_map(db: Session) -> dict[str, str]:
    """Return a lookup of site_code -> site_id for every site in the DB."""
    return {site.site_code: site.id for site in db.query(Site).all()}
16
+
17
+
18
def get_crop_map(db: Session) -> dict[str, str]:
    """Return a lookup of crop name -> crop_id for every crop in the DB."""
    return {crop.name: crop.id for crop in db.query(Crop).all()}
22
+
23
+
24
def import_equipment_groups(db: Session, base_path: Path):
    """Import equipment groups and parameters from CSV files.

    Reads one equipment CSV per crop from <base_path>/read-in-csvs. In each
    file, the header row holds parameter names and the first data row maps
    each parameter to its equipment group. The import is idempotent:
    existing groups/parameters are reused, not duplicated. Afterwards the
    grapes definitions are copied to table_grapes. Commits per crop.

    Args:
        db: Active SQLAlchemy session.
        base_path: Root of the source repository containing read-in-csvs/.
    """
    crop_map = get_crop_map(db)

    # Crop name -> source CSV filename.
    equipment_files = {
        "almonds": "Almond_Equipment.csv",
        "grapes": "Grape_Equipment.csv",
        "olives": "Olive_Equipment.csv",
        "pistachios": "Pistachio_Equipment.csv",
    }

    for crop_name, filename in equipment_files.items():
        filepath = base_path / "read-in-csvs" / filename
        if not filepath.exists():
            print(f"Warning: {filepath} not found, skipping")
            continue

        crop_id = crop_map.get(crop_name)
        if not crop_id:
            print(f"Warning: Crop '{crop_name}' not found in database")
            continue

        # Read CSV - row 0 is parameter names (headers), row 1 is equipment groups
        df = pd.read_csv(filepath, header=0, nrows=1)

        # Get unique equipment groups for this crop
        equipment_groups = set(df.iloc[0].values)

        # Create equipment groups
        eq_group_map = {}
        for eq_name in equipment_groups:
            if pd.isna(eq_name) or not eq_name:
                continue
            existing = db.query(EquipmentGroup).filter(
                EquipmentGroup.name == eq_name,
                EquipmentGroup.crop_id == crop_id
            ).first()
            if not existing:
                eq = EquipmentGroup(name=eq_name, crop_id=crop_id)
                db.add(eq)
                db.flush()  # flush so the generated id is available below
                eq_group_map[eq_name] = eq.id
            else:
                eq_group_map[eq_name] = existing.id

        # Create parameters, each linked to its equipment group (if mapped)
        for param_name in df.columns:
            eq_name = df.iloc[0][param_name]
            if pd.isna(eq_name):
                eq_id = None
            else:
                eq_id = eq_group_map.get(eq_name)

            existing = db.query(Parameter).filter(
                Parameter.name == param_name,
                Parameter.crop_id == crop_id
            ).first()
            if not existing:
                param = Parameter(
                    name=param_name,
                    display_name=param_name.replace("_", " ").title(),
                    crop_id=crop_id,
                    equipment_group_id=eq_id
                )
                db.add(param)

        db.commit()
        print(f"Imported equipment groups for {crop_name}")

    # Also add equipment groups for table_grapes (copy from grapes)
    grapes_id = crop_map.get("grapes")
    table_grapes_id = crop_map.get("table_grapes")
    if grapes_id and table_grapes_id:
        grape_eqs = db.query(EquipmentGroup).filter(EquipmentGroup.crop_id == grapes_id).all()
        for eq in grape_eqs:
            existing = db.query(EquipmentGroup).filter(
                EquipmentGroup.name == eq.name,
                EquipmentGroup.crop_id == table_grapes_id
            ).first()
            if not existing:
                new_eq = EquipmentGroup(name=eq.name, crop_id=table_grapes_id)
                db.add(new_eq)

        grape_params = db.query(Parameter).filter(Parameter.crop_id == grapes_id).all()
        for param in grape_params:
            existing = db.query(Parameter).filter(
                Parameter.name == param.name,
                Parameter.crop_id == table_grapes_id
            ).first()
            if not existing:
                # Find equipment group ID for table grapes
                eq_id = None
                if param.equipment_group:
                    new_eq = db.query(EquipmentGroup).filter(
                        EquipmentGroup.name == param.equipment_group.name,
                        EquipmentGroup.crop_id == table_grapes_id
                    ).first()
                    if new_eq:
                        eq_id = new_eq.id

                new_param = Parameter(
                    name=param.name,
                    display_name=param.display_name,
                    crop_id=table_grapes_id,
                    equipment_group_id=eq_id
                )
                db.add(new_param)
        db.commit()
        print("Copied equipment groups to table_grapes")
133
+
134
+
135
def import_sensor_data_from_csv(
    db: Session,
    csv_path: Path,
    site_map: dict[str, str],
    batch_size: int = 500
) -> int:
    """Import sensor data from a CSV file.

    Rows whose Site is unknown or whose TIMESTAMP is unparseable are skipped;
    rows whose (site, timestamp) already exist are counted as duplicates.

    Args:
        db: Active SQLAlchemy session.
        csv_path: CSV with at least TIMESTAMP and Site columns.
        site_map: Mapping of site_code -> site_id (see get_site_map).
        batch_size: Commit every this many imported rows. (Bug fix: the
            parameter was previously declared but ignored in favor of a
            hard-coded 500; the default preserves the old behavior.)

    Returns:
        Number of newly imported records.
    """
    print(f"Reading {csv_path.name}...")

    # Read CSV
    df = pd.read_csv(csv_path, low_memory=False)

    # Check for required columns
    if "TIMESTAMP" not in df.columns:
        print(f"Error: TIMESTAMP column not found in {csv_path}")
        return 0
    if "Site" not in df.columns:
        print(f"Error: Site column not found in {csv_path}")
        return 0

    # Columns to exclude from data JSON
    exclude_cols = {"TIMESTAMP", "Site", "RECORD", "Unnamed: 0"}
    data_cols = [c for c in df.columns if c not in exclude_cols]

    imported = 0
    skipped = 0
    duplicates = 0

    for idx, row in df.iterrows():
        site_code = row.get("Site")
        if pd.isna(site_code) or site_code not in site_map:
            skipped += 1
            continue

        try:
            timestamp = pd.to_datetime(row["TIMESTAMP"])
        except Exception:
            skipped += 1
            continue

        site_id = site_map[site_code]
        ts = timestamp.to_pydatetime()

        # Check for existing record
        existing = db.query(SensorData).filter(
            SensorData.site_id == site_id,
            SensorData.timestamp == ts
        ).first()

        if existing:
            duplicates += 1
            continue

        # Build data dict with all sensor values
        data = {}
        for col in data_cols:
            val = row[col]
            if pd.notna(val):
                # Convert numpy types to Python types
                if hasattr(val, 'item'):
                    val = val.item()
                data[col] = val

        record_num = row.get("RECORD")
        if pd.notna(record_num):
            record_num = int(record_num)
        else:
            record_num = None

        sensor_data = SensorData(
            site_id=site_id,
            timestamp=ts,
            data=data,
            record_number=record_num
        )
        db.add(sensor_data)
        imported += 1

        # Honor the batch_size parameter (previously hard-coded to 500).
        if imported % batch_size == 0:
            db.commit()
        # Progress cadence scales with batch size; default keeps the
        # original every-2000-rows message.
        if imported % (batch_size * 4) == 0:
            print(f"  Imported {imported} records...")

    db.commit()
    print(f"  Completed: {imported} imported, {duplicates} duplicates, {skipped} skipped")
    return imported
221
+
222
+
223
def import_all_sample_data(db: Session, original_repo_path: str):
    """Import all sample data from the original repo."""
    base_path = Path(original_repo_path)
    sample_path = base_path / "sample-data"

    if not sample_path.exists():
        raise FileNotFoundError(f"Sample data path not found: {sample_path}")

    # Equipment groups must exist before sensor data is imported.
    print("\n=== Importing Equipment Groups ===")
    import_equipment_groups(db, base_path)

    # Get site mapping
    site_map = get_site_map(db)
    print(f"\nFound {len(site_map)} sites in database")

    # Import sensor data from each CSV
    print("\n=== Importing Sensor Data ===")
    total = 0
    for csv_file in ("trex_data.csv", "matt_data.csv", "lynn_data.csv"):
        csv_path = sample_path / csv_file
        if not csv_path.exists():
            print(f"Warning: {csv_path} not found")
            continue
        total += import_sensor_data_from_csv(db, csv_path, site_map)

    print(f"\n=== Import Complete ===")
    print(f"Total records imported: {total}")
    return total
255
+
256
+
257
+ if __name__ == "__main__":
258
+ # Run as standalone script
259
+ import sys
260
+ sys.path.insert(0, str(Path(__file__).parent.parent.parent))
261
+
262
+ from backend.database import SessionLocal, init_db
263
+
264
+ init_db()
265
+ db = SessionLocal()
266
+
267
+ try:
268
+ original_repo = "/Users/rich/Dev/cropdash/crop-dashboard"
269
+ import_all_sample_data(db, original_repo)
270
+ finally:
271
+ db.close()
backend/services/pipeline_worker.py ADDED
@@ -0,0 +1,383 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Pipeline worker for processing CSI datalogger files.
3
+ Handles file upload, validation, processing, and archival.
4
+ """
5
+ import os
6
+ import shutil
7
+ import time
8
+ import hashlib
9
+ from pathlib import Path
10
+ from datetime import datetime
11
+ from typing import Optional
12
+ import pandas as pd
13
+ from sqlalchemy.orm import Session
14
+
15
+ from backend.models import Site, SensorData, FileArchive, AuditLog
16
+ from backend.config import settings
17
+
18
+
19
# Paths
UPLOAD_BASE = Path(settings.uploads_dir)
STAGING_DIR = UPLOAD_BASE / "staging"      # incoming files awaiting import
PROCESSED_DIR = UPLOAD_BASE / "processed"  # files already imported/archived

# Ensure directories exist
STAGING_DIR.mkdir(parents=True, exist_ok=True)
PROCESSED_DIR.mkdir(parents=True, exist_ok=True)

# Processing state
# NOTE(review): these module-level flags are read/written without a lock in
# process_all_staging_files (check-then-set) — confirm a single-worker
# deployment, or guard them like box_worker's _sync_lock.
_is_processing = False
_last_run: Optional[datetime] = None
31
+
32
+
33
def get_file_hash(filepath: Path) -> str:
    """Return the hex SHA-256 digest of *filepath*, read in 8 KiB chunks."""
    digest = hashlib.sha256()
    with open(filepath, 'rb') as fh:
        while chunk := fh.read(8192):
            digest.update(chunk)
    return digest.hexdigest()
40
+
41
+
42
def list_staging_files() -> list[dict]:
    """List CSV files in the staging directory, newest modification first."""
    entries = []
    for path in STAGING_DIR.iterdir():
        if not (path.is_file() and path.suffix.lower() == '.csv'):
            continue
        info = path.stat()
        entries.append({
            'filename': path.name,
            'size': info.st_size,
            'modified': datetime.fromtimestamp(info.st_mtime),
            'status': 'staging',
        })
    entries.sort(key=lambda e: e['modified'], reverse=True)
    return entries
55
+
56
+
57
def list_processed_files() -> list[dict]:
    """List CSV files in the processed directory, newest modification first."""
    entries = []
    for path in PROCESSED_DIR.iterdir():
        if not (path.is_file() and path.suffix.lower() == '.csv'):
            continue
        info = path.stat()
        entries.append({
            'filename': path.name,
            'size': info.st_size,
            'modified': datetime.fromtimestamp(info.st_mtime),
            'status': 'processed',
        })
    entries.sort(key=lambda e: e['modified'], reverse=True)
    return entries
70
+
71
+
72
def validate_csv_file(filepath: Path) -> tuple[bool, str]:
    """
    Validate that a CSV file is a valid CSI datalogger format.
    Returns (is_valid, error_message).
    """
    try:
        # CSI TOA5 files carry metadata on rows 0, 2 and 3 that must be
        # skipped; plain CSVs are read as-is. Only a small sample is read.
        skip = [0, 2, 3] if _is_csi_format(filepath) else None
        sample = pd.read_csv(
            filepath,
            header=[0],
            skiprows=skip,
            sep=',',
            na_values="NAN",
            engine='python',
            nrows=10,
        )

        # Both columns are required downstream by the import step.
        for required in ('TIMESTAMP', 'Site'):
            if required not in sample.columns:
                return False, f"Missing {required} column"

        return True, ""
    except Exception as exc:
        return False, str(exc)
98
+
99
+
100
+ def _is_csi_format(filepath: Path) -> bool:
101
+ """Check if file is CSI datalogger format by looking at first line."""
102
+ try:
103
+ with open(filepath, 'r') as f:
104
+ first_line = f.readline()
105
+ return first_line.startswith('"TOA5"') or first_line.startswith('TOA5')
106
+ except Exception:
107
+ return False
108
+
109
+
110
def process_single_file(
    db: Session,
    filepath: Path,
    user_id: Optional[str] = None
) -> dict:
    """
    Process a single CSV file and import data to database.
    Returns processing result dict.

    Pipeline: validate the file, dedupe by SHA-256 against FileArchive,
    read it (CSI TOA5 or plain CSV), insert new SensorData rows keyed by
    (site, timestamp), move the file into the processed directory, and
    optionally record an AuditLog entry.

    Args:
        db: Active SQLAlchemy session.
        filepath: CSV to process (normally inside the staging directory).
        user_id: When given, an audit event is written for the import.

    Returns:
        Dict with keys: filename, status ('success'/'error'/'duplicate'),
        records_imported, records_skipped, records_duplicate,
        error_message, processing_time (seconds).
    """
    start_time = time.time()
    result = {
        'filename': filepath.name,
        'status': 'error',
        'records_imported': 0,
        'records_skipped': 0,
        'records_duplicate': 0,
        'error_message': None,
        'processing_time': 0.0
    }

    try:
        # Validate file
        is_valid, error = validate_csv_file(filepath)
        if not is_valid:
            result['error_message'] = f"Validation failed: {error}"
            return result

        # Get file hash for deduplication tracking
        file_hash = get_file_hash(filepath)

        # Check if already processed (identical content seen before)
        existing_archive = db.query(FileArchive).filter(
            FileArchive.file_hash == file_hash
        ).first()
        if existing_archive and existing_archive.status == 'completed':
            result['error_message'] = "File already processed (duplicate hash)"
            result['status'] = 'duplicate'
            return result

        # Create or update archive record (reuses a prior failed attempt)
        if existing_archive:
            archive = existing_archive
            archive.status = 'processing'
        else:
            archive = FileArchive(
                original_filename=filepath.name,
                file_hash=file_hash,
                file_size=filepath.stat().st_size,
                status='processing'
            )
            db.add(archive)
        db.commit()

        # Read the CSV; CSI TOA5 needs metadata rows 0, 2, 3 skipped
        is_csi = _is_csi_format(filepath)
        if is_csi:
            df = pd.read_csv(
                filepath,
                header=[0],
                skiprows=[0, 2, 3],
                sep=',',
                na_values="NAN",
                engine='python',
                low_memory=False
            )
        else:
            df = pd.read_csv(filepath, low_memory=False)

        # Get site mapping (site_code -> site_id)
        sites = db.query(Site).all()
        site_map = {s.site_code: s.id for s in sites}

        # Columns to exclude from data JSON
        exclude_cols = {'TIMESTAMP', 'Site', 'RECORD', 'Unnamed: 0'}
        data_cols = [c for c in df.columns if c not in exclude_cols]

        imported = 0
        skipped = 0
        duplicates = 0

        for idx, row in df.iterrows():
            site_code = row.get('Site')
            if pd.isna(site_code) or site_code not in site_map:
                skipped += 1
                continue

            try:
                timestamp = pd.to_datetime(row['TIMESTAMP'])
            except Exception:
                # Unparseable timestamps are dropped, not fatal.
                skipped += 1
                continue

            site_id = site_map[site_code]
            ts = timestamp.to_pydatetime()

            # Check for existing record (upsert logic: existing rows win)
            existing = db.query(SensorData).filter(
                SensorData.site_id == site_id,
                SensorData.timestamp == ts
            ).first()

            if existing:
                duplicates += 1
                continue

            # Build data dict; numpy scalars become native Python types
            data = {}
            for col in data_cols:
                val = row[col]
                if pd.notna(val):
                    if hasattr(val, 'item'):
                        val = val.item()
                    data[col] = val

            # Apply QA/QC filters (like original)
            # Skip bad values (sentinel garbage in LW_IN)
            if data.get('LW_IN', 0) < -10000000:
                skipped += 1
                continue

            record_num = row.get('RECORD')
            if pd.notna(record_num):
                record_num = int(record_num)
            else:
                record_num = None

            sensor_data = SensorData(
                site_id=site_id,
                timestamp=ts,
                data=data,
                record_number=record_num
            )
            db.add(sensor_data)
            imported += 1

            # Batch commit
            if imported % 500 == 0:
                db.commit()

        db.commit()

        # Update archive record
        archive.status = 'completed'
        archive.processed_date = datetime.utcnow()
        archive.records_imported = imported

        # Move file to processed (timestamped name avoids collisions)
        processed_path = PROCESSED_DIR / f"{datetime.now().strftime('%Y%m%d_%H%M%S')}_{filepath.name}"
        archive.archived_path = str(processed_path)
        shutil.move(str(filepath), str(processed_path))

        db.commit()

        # Log audit event
        if user_id:
            audit = AuditLog(
                user_id=user_id,
                action='pipeline_import',
                resource_type='sensor_data',
                resource_id=archive.id,
                details={
                    'filename': filepath.name,
                    'records_imported': imported,
                    'records_skipped': skipped,
                    'records_duplicate': duplicates
                }
            )
            db.add(audit)
            db.commit()

        result['status'] = 'success'
        result['records_imported'] = imported
        result['records_skipped'] = skipped
        result['records_duplicate'] = duplicates

    except Exception as e:
        result['error_message'] = str(e)
        db.rollback()

        # Update archive with error; re-queried by filename because the
        # local `archive` object may be stale after the rollback.
        try:
            archive = db.query(FileArchive).filter(
                FileArchive.original_filename == filepath.name
            ).first()
            if archive:
                archive.status = 'error'
                archive.error_message = str(e)
                db.commit()
        except Exception:
            # Best-effort bookkeeping; result already carries the error.
            pass

    result['processing_time'] = time.time() - start_time
    return result
303
+
304
+
305
def process_all_staging_files(
    db: Session,
    user_id: Optional[str] = None,
    filenames: Optional[list[str]] = None
) -> list[dict]:
    """
    Process all files in staging directory (or specified files).
    Returns list of processing results.
    """
    global _is_processing, _last_run

    # Only one processing run at a time.
    if _is_processing:
        return [{'status': 'error', 'error_message': 'Processing already in progress'}]

    _is_processing = True
    outcomes: list[dict] = []
    try:
        # Resolve the target file list: explicit names, or every staged CSV.
        if filenames:
            targets = [STAGING_DIR / name for name in filenames
                       if (STAGING_DIR / name).exists()]
        else:
            targets = [p for p in STAGING_DIR.iterdir()
                       if p.is_file() and p.suffix.lower() == '.csv']

        outcomes = [process_single_file(db, target, user_id) for target in targets]
        _last_run = datetime.utcnow()
    finally:
        _is_processing = False

    return outcomes
339
+
340
+
341
def get_pipeline_status() -> dict:
    """Get current pipeline status: file listings plus worker state."""
    status = {
        'staging_files': list_staging_files(),
        'processed_files': list_processed_files(),
    }
    status['is_processing'] = _is_processing
    status['last_run'] = _last_run
    return status
349
+
350
+
351
def save_uploaded_file(file_content: bytes, filename: str) -> Path:
    """Save an uploaded file to the staging directory."""
    # Keep only safe characters and guarantee a .csv suffix.
    safe_filename = "".join(c for c in filename if c.isalnum() or c in '._-')
    if not safe_filename.lower().endswith('.csv'):
        safe_filename += '.csv'

    # Prefix with a timestamp so repeated uploads never collide.
    target = STAGING_DIR / f"{datetime.now().strftime('%Y%m%d_%H%M%S')}_{safe_filename}"
    target.write_bytes(file_content)
    return target
366
+
367
+
368
def delete_staging_file(filename: str) -> bool:
    """Delete a file from the staging directory.

    The resolved path must remain inside STAGING_DIR; names containing
    traversal components (e.g. "../") are rejected. Returns True only when
    an existing regular file was removed.
    """
    filepath = (STAGING_DIR / filename).resolve()
    # Security: reject paths that escape the staging directory.
    if STAGING_DIR.resolve() not in filepath.parents:
        return False
    if filepath.exists() and filepath.is_file():
        filepath.unlink()
        return True
    return False
375
+
376
+
377
def delete_processed_file(filename: str) -> bool:
    """Delete a file from the processed directory.

    The resolved path must remain inside PROCESSED_DIR; names containing
    traversal components (e.g. "../") are rejected. Returns True only when
    an existing regular file was removed.
    """
    filepath = (PROCESSED_DIR / filename).resolve()
    # Security: reject paths that escape the processed directory.
    if PROCESSED_DIR.resolve() not in filepath.parents:
        return False
    if filepath.exists() and filepath.is_file():
        filepath.unlink()
        return True
    return False
backend/static/CSG_LOGO_light.png ADDED

Git LFS Details

  • SHA256: 650a67c251fb2b6b980ddc49b053ada0b7fb194c52e615162165ad78a4da391c
  • Pointer size: 132 Bytes
  • Size of remote file: 3.15 MB
backend/static/apple-touch-icon.png ADDED
backend/static/assets/SiteMapContent-BZBnHuK2.js ADDED
The diff for this file is too large to render. See raw diff
 
backend/static/assets/index-CNGvhoA_.css ADDED
@@ -0,0 +1 @@
 
 
1
+ .leaflet-pane,.leaflet-tile,.leaflet-marker-icon,.leaflet-marker-shadow,.leaflet-tile-container,.leaflet-pane>svg,.leaflet-pane>canvas,.leaflet-zoom-box,.leaflet-image-layer,.leaflet-layer{position:absolute;left:0;top:0}.leaflet-container{overflow:hidden}.leaflet-tile,.leaflet-marker-icon,.leaflet-marker-shadow{-webkit-user-select:none;-moz-user-select:none;user-select:none;-webkit-user-drag:none}.leaflet-tile::-moz-selection{background:transparent}.leaflet-tile::selection{background:transparent}.leaflet-safari .leaflet-tile{image-rendering:-webkit-optimize-contrast}.leaflet-safari .leaflet-tile-container{width:1600px;height:1600px;-webkit-transform-origin:0 0}.leaflet-marker-icon,.leaflet-marker-shadow{display:block}.leaflet-container .leaflet-overlay-pane svg{max-width:none!important;max-height:none!important}.leaflet-container .leaflet-marker-pane img,.leaflet-container .leaflet-shadow-pane img,.leaflet-container .leaflet-tile-pane img,.leaflet-container img.leaflet-image-layer,.leaflet-container .leaflet-tile{max-width:none!important;max-height:none!important;width:auto;padding:0}.leaflet-container img.leaflet-tile{mix-blend-mode:plus-lighter}.leaflet-container.leaflet-touch-zoom{touch-action:pan-x pan-y}.leaflet-container.leaflet-touch-drag{touch-action:none;touch-action:pinch-zoom}.leaflet-container.leaflet-touch-drag.leaflet-touch-zoom{touch-action:none}.leaflet-container{-webkit-tap-highlight-color:transparent}.leaflet-container a{-webkit-tap-highlight-color:rgba(51,181,229,.4)}.leaflet-tile{filter:inherit;visibility:hidden}.leaflet-tile-loaded{visibility:inherit}.leaflet-zoom-box{width:0;height:0;box-sizing:border-box;z-index:800}.leaflet-overlay-pane svg{-moz-user-select:none}.leaflet-pane{z-index:400}.leaflet-tile-pane{z-index:200}.leaflet-overlay-pane{z-index:400}.leaflet-shadow-pane{z-index:500}.leaflet-marker-pane{z-index:600}.leaflet-tooltip-pane{z-index:650}.leaflet-popup-pane{z-index:700}.leaflet-map-pane canvas{z-index:100}.leaflet-map-pane 
svg{z-index:200}.leaflet-vml-shape{width:1px;height:1px}.lvml{behavior:url(#default#VML);display:inline-block;position:absolute}.leaflet-control{position:relative;z-index:800;pointer-events:visiblePainted;pointer-events:auto}.leaflet-top,.leaflet-bottom{position:absolute;z-index:1000;pointer-events:none}.leaflet-top{top:0}.leaflet-right{right:0}.leaflet-bottom{bottom:0}.leaflet-left{left:0}.leaflet-control{float:left;clear:both}.leaflet-right .leaflet-control{float:right}.leaflet-top .leaflet-control{margin-top:10px}.leaflet-bottom .leaflet-control{margin-bottom:10px}.leaflet-left .leaflet-control{margin-left:10px}.leaflet-right .leaflet-control{margin-right:10px}.leaflet-fade-anim .leaflet-popup{opacity:0;transition:opacity .2s linear}.leaflet-fade-anim .leaflet-map-pane .leaflet-popup{opacity:1}.leaflet-zoom-animated{transform-origin:0 0}svg.leaflet-zoom-animated{will-change:transform}.leaflet-zoom-anim .leaflet-zoom-animated{transition:transform .25s cubic-bezier(0,0,.25,1)}.leaflet-zoom-anim .leaflet-tile,.leaflet-pan-anim .leaflet-tile{transition:none}.leaflet-zoom-anim .leaflet-zoom-hide{visibility:hidden}.leaflet-interactive{cursor:pointer}.leaflet-grab{cursor:grab}.leaflet-crosshair,.leaflet-crosshair .leaflet-interactive{cursor:crosshair}.leaflet-popup-pane,.leaflet-control{cursor:auto}.leaflet-dragging .leaflet-grab,.leaflet-dragging .leaflet-grab .leaflet-interactive,.leaflet-dragging .leaflet-marker-draggable{cursor:move;cursor:grabbing}.leaflet-marker-icon,.leaflet-marker-shadow,.leaflet-image-layer,.leaflet-pane>svg path,.leaflet-tile-container{pointer-events:none}.leaflet-marker-icon.leaflet-interactive,.leaflet-image-layer.leaflet-interactive,.leaflet-pane>svg path.leaflet-interactive,svg.leaflet-image-layer.leaflet-interactive path{pointer-events:visiblePainted;pointer-events:auto}.leaflet-container{background:#ddd;outline-offset:1px}.leaflet-container a{color:#0078a8}.leaflet-zoom-box{border:2px dotted 
#38f;background:#ffffff80}.leaflet-container{font-family:Helvetica Neue,Arial,Helvetica,sans-serif;font-size:12px;font-size:.75rem;line-height:1.5}.leaflet-bar{box-shadow:0 1px 5px #000000a6;border-radius:4px}.leaflet-bar a{background-color:#fff;border-bottom:1px solid #ccc;width:26px;height:26px;line-height:26px;display:block;text-align:center;text-decoration:none;color:#000}.leaflet-bar a,.leaflet-control-layers-toggle{background-position:50% 50%;background-repeat:no-repeat;display:block}.leaflet-bar a:hover,.leaflet-bar a:focus{background-color:#f4f4f4}.leaflet-bar a:first-child{border-top-left-radius:4px;border-top-right-radius:4px}.leaflet-bar a:last-child{border-bottom-left-radius:4px;border-bottom-right-radius:4px;border-bottom:none}.leaflet-bar a.leaflet-disabled{cursor:default;background-color:#f4f4f4;color:#bbb}.leaflet-touch .leaflet-bar a{width:30px;height:30px;line-height:30px}.leaflet-touch .leaflet-bar a:first-child{border-top-left-radius:2px;border-top-right-radius:2px}.leaflet-touch .leaflet-bar a:last-child{border-bottom-left-radius:2px;border-bottom-right-radius:2px}.leaflet-control-zoom-in,.leaflet-control-zoom-out{font:700 18px Lucida Console,Monaco,monospace;text-indent:1px}.leaflet-touch .leaflet-control-zoom-in,.leaflet-touch .leaflet-control-zoom-out{font-size:22px}.leaflet-control-layers{box-shadow:0 1px 5px 
#0006;background:#fff;border-radius:5px}.leaflet-control-layers-toggle{background-image:url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABoAAAAaCAQAAAADQ4RFAAACf0lEQVR4AY1UM3gkARTePdvdoTxXKc+qTl3aU5U6b2Kbkz3Gtq3Zw6ziLGNPzrYx7946Tr6/ee/XeCQ4D3ykPtL5tHno4n0d/h3+xfuWHGLX81cn7r0iTNzjr7LrlxCqPtkbTQEHeqOrTy4Yyt3VCi/IOB0v7rVC7q45Q3Gr5K6jt+3Gl5nCoDD4MtO+j96Wu8atmhGqcNGHObuf8OM/x3AMx38+4Z2sPqzCxRFK2aF2e5Jol56XTLyggAMTL56XOMoS1W4pOyjUcGGQdZxU6qRh7B9Zp+PfpOFlqt0zyDZckPi1ttmIp03jX8gyJ8a/PG2yutpS/Vol7peZIbZcKBAEEheEIAgFbDkz5H6Zrkm2hVWGiXKiF4Ycw0RWKdtC16Q7qe3X4iOMxruonzegJzWaXFrU9utOSsLUmrc0YjeWYjCW4PDMADElpJSSQ0vQvA1Tm6/JlKnqFs1EGyZiFCqnRZTEJJJiKRYzVYzJck2Rm6P4iH+cmSY0YzimYa8l0EtTODFWhcMIMVqdsI2uiTvKmTisIDHJ3od5GILVhBCarCfVRmo4uTjkhrhzkiBV7SsaqS+TzrzM1qpGGUFt28pIySQHR6h7F6KSwGWm97ay+Z+ZqMcEjEWebE7wxCSQwpkhJqoZA5ivCdZDjJepuJ9IQjGGUmuXJdBFUygxVqVsxFsLMbDe8ZbDYVCGKxs+W080max1hFCarCfV+C1KATwcnvE9gRRuMP2prdbWGowm1KB1y+zwMMENkM755cJ2yPDtqhTI6ED1M/82yIDtC/4j4BijjeObflpO9I9MwXTCsSX8jWAFeHr05WoLTJ5G8IQVS/7vwR6ohirYM7f6HzYpogfS3R2OAAAAAElFTkSuQmCC);width:36px;height:36px}.leaflet-retina 
.leaflet-control-layers-toggle{background-image:url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADQAAAA0CAQAAABvcdNgAAAEsklEQVR4AWL4TydIhpZK1kpWOlg0w3ZXP6D2soBtG42jeI6ZmQTHzAxiTbSJsYLjO9HhP+WOmcuhciVnmHVQcJnp7DFvScowZorad/+V/fVzMdMT2g9Cv9guXGv/7pYOrXh2U+RRR3dSd9JRx6bIFc/ekqHI29JC6pJ5ZEh1yWkhkbcFeSjxgx3L2m1cb1C7bceyxA+CNjT/Ifff+/kDk2u/w/33/IeCMOSaWZ4glosqT3DNnNZQ7Cs58/3Ce5HL78iZH/vKVIaYlqzfdLu8Vi7dnvUbEza5Idt36tquZFldl6N5Z/POLof0XLK61mZCmJSWjVF9tEjUluu74IUXvgttuVIHE7YxSkaYhJZam7yiM9Pv82JYfl9nptxZaxMJE4YSPty+vF0+Y2up9d3wwijfjZbabqm/3bZ9ecKHsiGmRflnn1MW4pjHf9oLufyn2z3y1D6n8g8TZhxyzipLNPnAUpsOiuWimg52psrTZYnOWYNDTMuWBWa0tJb4rgq1UvmutpaYEbZlwU3CLJm/ayYjHW5/h7xWLn9Hh1vepDkyf7dE7MtT5LR4e7yYpHrkhOUpEfssBLq2pPhAqoSWKUkk7EDqkmK6RrCEzqDjhNDWNE+XSMvkJRDWlZTmCW0l0PHQGRZY5t1L83kT0Y3l2SItk5JAWHl2dCOBm+fPu3fo5/3v61RMCO9Jx2EEYYhb0rmNQMX/vm7gqOEJLcXTGw3CAuRNeyaPWwjR8PRqKQ1PDA/dpv+on9Shox52WFnx0KY8onHayrJzm87i5h9xGw/tfkev0jGsQizqezUKjk12hBMKJ4kbCqGPVNXudyyrShovGw5CgxsRICxF6aRmSjlBnHRzg7Gx8fKqEubI2rahQYdR1YgDIRQO7JvQyD52hoIQx0mxa0ODtW2Iozn1le2iIRdzwWewedyZzewidueOGqlsn1MvcnQpuVwLGG3/IR1hIKxCjelIDZ8ldqWz25jWAsnldEnK0Zxro19TGVb2ffIZEsIO89EIEDvKMPrzmBOQcKQ+rroye6NgRRxqR4U8EAkz0CL6uSGOm6KQCdWjvjRiSP1BPalCRS5iQYiEIvxuBMJEWgzSoHADcVMuN7IuqqTeyUPq22qFimFtxDyBBJEwNyt6TM88blFHao/6tWWhuuOM4SAK4EI4QmFHA+SEyWlp4EQoJ13cYGzMu7yszEIBOm2rVmHUNqwAIQabISNMRstmdhNWcFLsSm+0tjJH1MdRxO5Nx0WDMhCtgD6OKgZeljJqJKc9po8juskR9XN0Y1lZ3mWjLR9JCO1jRDMd0fpYC2VnvjBSEFg7wBENc0R9HFlb0xvF1+TBEpF68d+DHR6IOWVv2BECtxo46hOFUBd/APU57WIoEwJhIi2CdpyZX0m93BZicktMj1AS9dClteUFAUNUIEygRZCtik5zSxI9MubTBH1GOiHsiLJ3OCoSZkILa9PxiN0EbvhsAo8tdAf9Seepd36lGWHmtNANTv5Jd0z4QYyeo/UEJqxKRpg5LZx6btLPsOaEmdMyxYdlc8LMaJnikDlhclqmPiQnTEpLUIZEwkRagjYkEibQErwhkTAKCLQEbUgkzJQWc/0PstHHcfEdQ+UAAAAASUVORK5CYII=);background-size:26px 26px}.leaflet-touch .leaflet-control-layers-toggle{width:44px;height:44px}.leaflet-control-layers .leaflet-control-layers-list,.leaflet-control-layers-expanded 
.leaflet-control-layers-toggle{display:none}.leaflet-control-layers-expanded .leaflet-control-layers-list{display:block;position:relative}.leaflet-control-layers-expanded{padding:6px 10px 6px 6px;color:#333;background:#fff}.leaflet-control-layers-scrollbar{overflow-y:scroll;overflow-x:hidden;padding-right:5px}.leaflet-control-layers-selector{margin-top:2px;position:relative;top:1px}.leaflet-control-layers label{display:block;font-size:13px;font-size:1.08333em}.leaflet-control-layers-separator{height:0;border-top:1px solid #ddd;margin:5px -10px 5px -6px}.leaflet-default-icon-path{background-image:url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABkAAAApCAYAAADAk4LOAAAFgUlEQVR4Aa1XA5BjWRTN2oW17d3YaZtr2962HUzbDNpjszW24mRt28p47v7zq/bXZtrp/lWnXr337j3nPCe85NcypgSFdugCpW5YoDAMRaIMqRi6aKq5E3YqDQO3qAwjVWrD8Ncq/RBpykd8oZUb/kaJutow8r1aP9II0WmLKLIsJyv1w/kqw9Ch2MYdB++12Onxee/QMwvf4/Dk/Lfp/i4nxTXtOoQ4pW5Aj7wpici1A9erdAN2OH64x8OSP9j3Ft3b7aWkTg/Fm91siTra0f9on5sQr9INejH6CUUUpavjFNq1B+Oadhxmnfa8RfEmN8VNAsQhPqF55xHkMzz3jSmChWU6f7/XZKNH+9+hBLOHYozuKQPxyMPUKkrX/K0uWnfFaJGS1QPRtZsOPtr3NsW0uyh6NNCOkU3Yz+bXbT3I8G3xE5EXLXtCXbbqwCO9zPQYPRTZ5vIDXD7U+w7rFDEoUUf7ibHIR4y6bLVPXrz8JVZEql13trxwue/uDivd3fkWRbS6/IA2bID4uk0UpF1N8qLlbBlXs4Ee7HLTfV1j54APvODnSfOWBqtKVvjgLKzF5YdEk5ewRkGlK0i33Eofffc7HT56jD7/6U+qH3Cx7SBLNntH5YIPvODnyfIXZYRVDPqgHtLs5ABHD3YzLuespb7t79FY34DjMwrVrcTuwlT55YMPvOBnRrJ4VXTdNnYug5ucHLBjEpt30701A3Ts+HEa73u6dT3FNWwflY86eMHPk+Yu+i6pzUpRrW7SNDg5JHR4KapmM5Wv2E8Tfcb1HoqqHMHU+uWDD7zg54mz5/2BSnizi9T1Dg4QQXLToGNCkb6tb1NU+QAlGr1++eADrzhn/u8Q2YZhQVlZ5+CAOtqfbhmaUCS1ezNFVm2imDbPmPng5wmz+gwh+oHDce0eUtQ6OGDIyR0uUhUsoO3vfDmmgOezH0mZN59x7MBi++WDL1g/eEiU3avlidO671bkLfwbw5XV2P8Pzo0ydy4t2/0eu33xYSOMOD8hTf4CrBtGMSoXfPLchX+J0ruSePw3LZeK0juPJbYzrhkH0io7B3k164hiGvawhOKMLkrQLyVpZg8rHFW7E2uHOL888IBPlNZ1FPzstSJM694fWr6RwpvcJK60+0HCILTBzZLFNdtAzJaohze60T8qBzyh5ZuOg5e7uwQppofEmf2++DYvmySqGBuKaicF1blQjhuHdvCIMvp8whTTfZzI7RldpwtSzL+F1+wkdZ2TBOW2gIF88PBTzD/gpeREAMEbxnJcaJHNHrpzji0gQCS6hdkEeYt9DF/2qPcEC8RM28Hwmr3
sdNyht00byAut2k3gufWNtgtOEOFGUwcXWNDbdNbpgBGxEvKkOQsxivJx33iow0Vw5S6SVTrpVq11ysA2Rp7gTfPfktc6zhtXBBC+adRLshf6sG2RfHPZ5EAc4sVZ83yCN00Fk/4kggu40ZTvIEm5g24qtU4KjBrx/BTTH8ifVASAG7gKrnWxJDcU7x8X6Ecczhm3o6YicvsLXWfh3Ch1W0k8x0nXF+0fFxgt4phz8QvypiwCCFKMqXCnqXExjq10beH+UUA7+nG6mdG/Pu0f3LgFcGrl2s0kNNjpmoJ9o4B29CMO8dMT4Q5ox8uitF6fqsrJOr8qnwNbRzv6hSnG5wP+64C7h9lp30hKNtKdWjtdkbuPA19nJ7Tz3zR/ibgARbhb4AlhavcBebmTHcFl2fvYEnW0ox9xMxKBS8btJ+KiEbq9zA4RthQXDhPa0T9TEe69gWupwc6uBUphquXgf+/FrIjweHQS4/pduMe5ERUMHUd9xv8ZR98CxkS4F2n3EUrUZ10EYNw7BWm9x1GiPssi3GgiGRDKWRYZfXlON+dfNbM+GgIwYdwAAAAASUVORK5CYII=)}.leaflet-container .leaflet-control-attribution{background:#fff;background:#fffc;margin:0}.leaflet-control-attribution,.leaflet-control-scale-line{padding:0 5px;color:#333;line-height:1.4}.leaflet-control-attribution a{text-decoration:none}.leaflet-control-attribution a:hover,.leaflet-control-attribution a:focus{text-decoration:underline}.leaflet-attribution-flag{display:inline!important;vertical-align:baseline!important;width:1em;height:.6669em}.leaflet-left .leaflet-control-scale{margin-left:5px}.leaflet-bottom .leaflet-control-scale{margin-bottom:5px}.leaflet-control-scale-line{border:2px solid #777;border-top:none;line-height:1.1;padding:2px 5px 1px;white-space:nowrap;box-sizing:border-box;background:#fffc;text-shadow:1px 1px #fff}.leaflet-control-scale-line:not(:first-child){border-top:2px solid #777;border-bottom:none;margin-top:-2px}.leaflet-control-scale-line:not(:first-child):not(:last-child){border-bottom:2px solid #777}.leaflet-touch .leaflet-control-attribution,.leaflet-touch .leaflet-control-layers,.leaflet-touch .leaflet-bar{box-shadow:none}.leaflet-touch .leaflet-control-layers,.leaflet-touch .leaflet-bar{border:2px solid rgba(0,0,0,.2);background-clip:padding-box}.leaflet-popup{position:absolute;text-align:center;margin-bottom:20px}.leaflet-popup-content-wrapper{padding:1px;text-align:left;border-radius:12px}.leaflet-popup-content{margin:13px 24px 13px 
20px;line-height:1.3;font-size:13px;font-size:1.08333em;min-height:1px}.leaflet-popup-content p{margin:1.3em 0}.leaflet-popup-tip-container{width:40px;height:20px;position:absolute;left:50%;margin-top:-1px;margin-left:-20px;overflow:hidden;pointer-events:none}.leaflet-popup-tip{width:17px;height:17px;padding:1px;margin:-10px auto 0;pointer-events:auto;transform:rotate(45deg)}.leaflet-popup-content-wrapper,.leaflet-popup-tip{background:#fff;color:#333;box-shadow:0 3px 14px #0006}.leaflet-container a.leaflet-popup-close-button{position:absolute;top:0;right:0;border:none;text-align:center;width:24px;height:24px;font:16px/24px Tahoma,Verdana,sans-serif;color:#757575;text-decoration:none;background:transparent}.leaflet-container a.leaflet-popup-close-button:hover,.leaflet-container a.leaflet-popup-close-button:focus{color:#585858}.leaflet-popup-scrolled{overflow:auto}.leaflet-oldie .leaflet-popup-content-wrapper{-ms-zoom:1}.leaflet-oldie .leaflet-popup-tip{width:24px;margin:0 auto;-ms-filter:"progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678)";filter:progid:DXImageTransform.Microsoft.Matrix(M11=.70710678,M12=.70710678,M21=-.70710678,M22=.70710678)}.leaflet-oldie .leaflet-control-zoom,.leaflet-oldie .leaflet-control-layers,.leaflet-oldie .leaflet-popup-content-wrapper,.leaflet-oldie .leaflet-popup-tip{border:1px solid #999}.leaflet-div-icon{background:#fff;border:1px solid #666}.leaflet-tooltip{position:absolute;padding:6px;background-color:#fff;border:1px solid #fff;border-radius:3px;color:#222;white-space:nowrap;-webkit-user-select:none;-moz-user-select:none;user-select:none;pointer-events:none;box-shadow:0 1px 3px #0006}.leaflet-tooltip.leaflet-interactive{cursor:pointer;pointer-events:auto}.leaflet-tooltip-top:before,.leaflet-tooltip-bottom:before,.leaflet-tooltip-left:before,.leaflet-tooltip-right:before{position:absolute;pointer-events:none;border:6px solid 
transparent;background:transparent;content:""}.leaflet-tooltip-bottom{margin-top:6px}.leaflet-tooltip-top{margin-top:-6px}.leaflet-tooltip-bottom:before,.leaflet-tooltip-top:before{left:50%;margin-left:-6px}.leaflet-tooltip-top:before{bottom:0;margin-bottom:-12px;border-top-color:#fff}.leaflet-tooltip-bottom:before{top:0;margin-top:-12px;margin-left:-6px;border-bottom-color:#fff}.leaflet-tooltip-left{margin-left:-6px}.leaflet-tooltip-right{margin-left:6px}.leaflet-tooltip-left:before,.leaflet-tooltip-right:before{top:50%;margin-top:-6px}.leaflet-tooltip-left:before{right:0;margin-right:-12px;border-left-color:#fff}.leaflet-tooltip-right:before{left:0;margin-left:-12px;border-right-color:#fff}@media print{.leaflet-control{-webkit-print-color-adjust:exact;print-color-adjust:exact}}*,:before,:after{--tw-border-spacing-x: 0;--tw-border-spacing-y: 0;--tw-translate-x: 0;--tw-translate-y: 0;--tw-rotate: 0;--tw-skew-x: 0;--tw-skew-y: 0;--tw-scale-x: 1;--tw-scale-y: 1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness: proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width: 0px;--tw-ring-offset-color: #fff;--tw-ring-color: rgb(59 130 246 / .5);--tw-ring-offset-shadow: 0 0 #0000;--tw-ring-shadow: 0 0 #0000;--tw-shadow: 0 0 #0000;--tw-shadow-colored: 0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }::backdrop{--tw-border-spacing-x: 0;--tw-border-spacing-y: 
0;--tw-translate-x: 0;--tw-translate-y: 0;--tw-rotate: 0;--tw-skew-x: 0;--tw-skew-y: 0;--tw-scale-x: 1;--tw-scale-y: 1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness: proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width: 0px;--tw-ring-offset-color: #fff;--tw-ring-color: rgb(59 130 246 / .5);--tw-ring-offset-shadow: 0 0 #0000;--tw-ring-shadow: 0 0 #0000;--tw-shadow: 0 0 #0000;--tw-shadow-colored: 0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }*,:before,:after{box-sizing:border-box;border-width:0;border-style:solid;border-color:#e5e7eb}:before,:after{--tw-content: ""}html,:host{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;font-family:ui-sans-serif,system-ui,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color Emoji";font-feature-settings:normal;font-variation-settings:normal;-webkit-tap-highlight-color:transparent}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier 
New,monospace;font-feature-settings:normal;font-variation-settings:normal;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-feature-settings:inherit;font-variation-settings:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dl,dd,h1,h2,h3,h4,h5,h6,hr,figure,p,pre{margin:0}fieldset{margin:0;padding:0}legend{padding:0}ol,ul,menu{list-style:none;margin:0;padding:0}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}button,[role=button]{cursor:pointer}:disabled{cursor:default}img,svg,video,canvas,audio,iframe,embed,object{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}[hidden]:where(:not([hidden=until-found])){display:none}:root{--background: 0 0% 98%;--foreground: 222.2 84% 4.9%;--card: 0 0% 100%;--card-foreground: 222.2 84% 4.9%;--primary: 221.2 83.2% 53.3%;--primary-foreground: 210 40% 98%;--secondary: 210 40% 96.1%;--secondary-foreground: 222.2 47.4% 11.2%;--muted: 210 40% 96.1%;--muted-foreground: 215.4 16.3% 46.9%;--accent: 210 40% 
96.1%;--accent-foreground: 222.2 47.4% 11.2%;--destructive: 0 84.2% 60.2%;--destructive-foreground: 210 40% 98%;--border: 214.3 31.8% 91.4%;--input: 214.3 31.8% 91.4%;--ring: 221.2 83.2% 53.3%;--radius: .5rem}.dark{--background: 222.2 84% 4.9%;--foreground: 210 40% 98%;--card: 222.2 84% 4.9%;--card-foreground: 210 40% 98%;--primary: 217.2 91.2% 59.8%;--primary-foreground: 222.2 47.4% 11.2%;--secondary: 217.2 32.6% 17.5%;--secondary-foreground: 210 40% 98%;--muted: 217.2 32.6% 17.5%;--muted-foreground: 215 20.2% 65.1%;--accent: 217.2 32.6% 17.5%;--accent-foreground: 210 40% 98%;--destructive: 0 62.8% 30.6%;--destructive-foreground: 210 40% 98%;--border: 217.2 32.6% 17.5%;--input: 217.2 32.6% 17.5%;--ring: 224.3 76.3% 48%}*{border-color:hsl(var(--border))}body{background-color:hsl(var(--background));color:hsl(var(--foreground));font-feature-settings:"rlig" 1,"calt" 1}.pointer-events-none{pointer-events:none}.pointer-events-auto{pointer-events:auto}.visible{visibility:visible}.fixed{position:fixed}.absolute{position:absolute}.relative{position:relative}.sticky{position:sticky}.inset-0{top:0;right:0;bottom:0;left:0}.bottom-0{bottom:0}.bottom-2{bottom:.5rem}.bottom-4{bottom:1rem}.left-0{left:0}.left-0\.5{left:.125rem}.left-1\/2{left:50%}.left-2{left:.5rem}.left-3{left:.75rem}.right-0{right:0}.right-2{right:.5rem}.right-3{right:.75rem}.right-4{right:1rem}.top-0{top:0}.top-0\.5{top:.125rem}.top-1\/2{top:50%}.top-16{top:4rem}.top-2{top:.5rem}.z-0{z-index:0}.z-10{z-index:10}.z-40{z-index:40}.z-50{z-index:50}.z-\[1000\]{z-index:1000}.z-\[100\]{z-index:100}.mx-1{margin-left:.25rem;margin-right:.25rem}.mx-auto{margin-left:auto;margin-right:auto}.mb-1{margin-bottom:.25rem}.mb-1\.5{margin-bottom:.375rem}.mb-2{margin-bottom:.5rem}.mb-3{margin-bottom:.75rem}.mb-4{margin-bottom:1rem}.mb-6{margin-bottom:1.5rem}.mb-8{margin-bottom:2rem}.ml-2{margin-left:.5rem}.ml-auto{margin-left:auto}.mr-1{margin-right:.25rem}.mt-0\.5{margin-top:.125rem}.mt-1{margin-top:.25rem}.mt-2{margin-top:.5rem}
.mt-3{margin-top:.75rem}.mt-32{margin-top:8rem}.mt-4{margin-top:1rem}.mt-6{margin-top:1.5rem}.mt-8{margin-top:2rem}.mt-auto{margin-top:auto}.block{display:block}.inline-block{display:inline-block}.flex{display:flex}.inline-flex{display:inline-flex}.table{display:table}.grid{display:grid}.hidden{display:none}.h-1{height:.25rem}.h-10{height:2.5rem}.h-12{height:3rem}.h-14{height:3.5rem}.h-16{height:4rem}.h-2{height:.5rem}.h-2\.5{height:.625rem}.h-24{height:6rem}.h-3{height:.75rem}.h-3\.5{height:.875rem}.h-32{height:8rem}.h-4{height:1rem}.h-5{height:1.25rem}.h-6{height:1.5rem}.h-7{height:1.75rem}.h-8{height:2rem}.h-\[400px\]{height:400px}.h-full{height:100%}.max-h-64{max-height:16rem}.max-h-72{max-height:18rem}.max-h-80{max-height:20rem}.max-h-96{max-height:24rem}.max-h-\[200px\]{max-height:200px}.max-h-full{max-height:100%}.min-h-\[calc\(100vh-4rem\)\]{min-height:calc(100vh - 4rem)}.min-h-screen{min-height:100vh}.w-1\/2{width:50%}.w-10{width:2.5rem}.w-11{width:2.75rem}.w-12{width:3rem}.w-14{width:3.5rem}.w-16{width:4rem}.w-2{width:.5rem}.w-2\.5{width:.625rem}.w-20{width:5rem}.w-24{width:6rem}.w-3{width:.75rem}.w-3\.5{width:.875rem}.w-3\/4{width:75%}.w-32{width:8rem}.w-4{width:1rem}.w-48{width:12rem}.w-5{width:1.25rem}.w-6{width:1.5rem}.w-64{width:16rem}.w-7{width:1.75rem}.w-8{width:2rem}.w-\[300px\]{width:300px}.w-auto{width:auto}.w-full{width:100%}.w-px{width:1px}.min-w-0{min-width:0px}.max-w-7xl{max-width:80rem}.max-w-\[180px\]{max-width:180px}.max-w-full{max-width:100%}.max-w-md{max-width:28rem}.max-w-sm{max-width:24rem}.max-w-xl{max-width:36rem}.flex-1{flex:1 1 0%}.flex-shrink-0,.shrink-0{flex-shrink:0}.origin-left{transform-origin:left}.-translate-x-1\/2{--tw-translate-x: -50%;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skew(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.-translate-y-1\/2{--tw-translate-y: 
-50%;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skew(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.translate-x-0{--tw-translate-x: 0px;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skew(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.translate-x-5{--tw-translate-x: 1.25rem;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skew(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}.transform{transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skew(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}@keyframes pulse{50%{opacity:.5}}.animate-pulse{animation:pulse 2s cubic-bezier(.4,0,.6,1) infinite}@keyframes spin{to{transform:rotate(360deg)}}.animate-spin{animation:spin 1s linear infinite}.cursor-help{cursor:help}.cursor-not-allowed{cursor:not-allowed}.cursor-pointer{cursor:pointer}.select-none{-webkit-user-select:none;-moz-user-select:none;user-select:none}.resize-none{resize:none}.appearance-none{-webkit-appearance:none;-moz-appearance:none;appearance:none}.grid-cols-1{grid-template-columns:repeat(1,minmax(0,1fr))}.grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.grid-cols-4{grid-template-columns:repeat(4,minmax(0,1fr))}.flex-col{flex-direction:column}.flex-wrap{flex-wrap:wrap}.items-start{align-items:flex-start}.items-center{align-items:center}.justify-end{justify-content:flex-end}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.gap-1{gap:.25rem}.gap-1\.5{gap:.375rem}.gap-2{gap:.5rem}.gap-3{gap:.75rem}.gap-4{gap:1rem}.gap-6{gap:1.5rem}.gap-8{gap:2rem}.space-y-0\.5>:not([hidden])~:not([hidden]){--tw-space-y-reverse: 
0;margin-top:calc(.125rem * calc(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.125rem * var(--tw-space-y-reverse))}.space-y-1>:not([hidden])~:not([hidden]){--tw-space-y-reverse: 0;margin-top:calc(.25rem * calc(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.25rem * var(--tw-space-y-reverse))}.space-y-1\.5>:not([hidden])~:not([hidden]){--tw-space-y-reverse: 0;margin-top:calc(.375rem * calc(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.375rem * var(--tw-space-y-reverse))}.space-y-2>:not([hidden])~:not([hidden]){--tw-space-y-reverse: 0;margin-top:calc(.5rem * calc(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.5rem * var(--tw-space-y-reverse))}.space-y-3>:not([hidden])~:not([hidden]){--tw-space-y-reverse: 0;margin-top:calc(.75rem * calc(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.75rem * var(--tw-space-y-reverse))}.space-y-4>:not([hidden])~:not([hidden]){--tw-space-y-reverse: 0;margin-top:calc(1rem * calc(1 - var(--tw-space-y-reverse)));margin-bottom:calc(1rem * var(--tw-space-y-reverse))}.space-y-6>:not([hidden])~:not([hidden]){--tw-space-y-reverse: 0;margin-top:calc(1.5rem * calc(1 - var(--tw-space-y-reverse)));margin-bottom:calc(1.5rem * var(--tw-space-y-reverse))}.space-y-8>:not([hidden])~:not([hidden]){--tw-space-y-reverse: 0;margin-top:calc(2rem * calc(1 - var(--tw-space-y-reverse)));margin-bottom:calc(2rem * var(--tw-space-y-reverse))}.divide-x>:not([hidden])~:not([hidden]){--tw-divide-x-reverse: 0;border-right-width:calc(1px * var(--tw-divide-x-reverse));border-left-width:calc(1px * calc(1 - var(--tw-divide-x-reverse)))}.divide-y>:not([hidden])~:not([hidden]){--tw-divide-y-reverse: 0;border-top-width:calc(1px * calc(1 - var(--tw-divide-y-reverse)));border-bottom-width:calc(1px * 
var(--tw-divide-y-reverse))}.divide-border>:not([hidden])~:not([hidden]){border-color:hsl(var(--border))}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.overflow-y-auto{overflow-y:auto}.truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.rounded{border-radius:.25rem}.rounded-2xl{border-radius:1rem}.rounded-full{border-radius:9999px}.rounded-lg{border-radius:var(--radius)}.rounded-md{border-radius:calc(var(--radius) - 2px)}.rounded-xl{border-radius:.75rem}.rounded-b-lg{border-bottom-right-radius:var(--radius);border-bottom-left-radius:var(--radius)}.border{border-width:1px}.border-0{border-width:0px}.border-2{border-width:2px}.border-b{border-bottom-width:1px}.border-r{border-right-width:1px}.border-t{border-top-width:1px}.border-dashed{border-style:dashed}.border-amber-500\/20{border-color:#f59e0b33}.border-amber-500\/30{border-color:#f59e0b4d}.border-blue-500\/20{border-color:#3b82f633}.border-blue-500\/30{border-color:#3b82f64d}.border-border{border-color:hsl(var(--border))}.border-emerald-500\/20{border-color:#10b98133}.border-emerald-500\/30{border-color:#10b9814d}.border-green-500\/20{border-color:#22c55e33}.border-input{border-color:hsl(var(--input))}.border-red-500\/20{border-color:#ef444433}.border-red-500\/30{border-color:#ef44444d}.border-slate-500\/20{border-color:#64748b33}.bg-amber-500{--tw-bg-opacity: 1;background-color:rgb(245 158 11 / var(--tw-bg-opacity, 1))}.bg-amber-500\/10{background-color:#f59e0b1a}.bg-amber-500\/20{background-color:#f59e0b33}.bg-amber-500\/5{background-color:#f59e0b0d}.bg-background{background-color:hsl(var(--background))}.bg-background\/60{background-color:hsl(var(--background) / .6)}.bg-background\/80{background-color:hsl(var(--background) / .8)}.bg-black\/50{background-color:#00000080}.bg-black\/60{background-color:#0009}.bg-blue-500{--tw-bg-opacity: 1;background-color:rgb(59 130 246 / var(--tw-bg-opacity, 
1))}.bg-blue-500\/10{background-color:#3b82f61a}.bg-blue-500\/20{background-color:#3b82f633}.bg-border{background-color:hsl(var(--border))}.bg-card{background-color:hsl(var(--card))}.bg-card\/80{background-color:hsl(var(--card) / .8)}.bg-card\/95{background-color:hsl(var(--card) / .95)}.bg-cyan-500{--tw-bg-opacity: 1;background-color:rgb(6 182 212 / var(--tw-bg-opacity, 1))}.bg-cyan-500\/10{background-color:#06b6d41a}.bg-cyan-500\/20{background-color:#06b6d433}.bg-destructive\/10{background-color:hsl(var(--destructive) / .1)}.bg-emerald-500{--tw-bg-opacity: 1;background-color:rgb(16 185 129 / var(--tw-bg-opacity, 1))}.bg-emerald-500\/10{background-color:#10b9811a}.bg-gray-500\/20{background-color:#6b728033}.bg-green-500{--tw-bg-opacity: 1;background-color:rgb(34 197 94 / var(--tw-bg-opacity, 1))}.bg-green-500\/20{background-color:#22c55e33}.bg-green-500\/5{background-color:#22c55e0d}.bg-muted{background-color:hsl(var(--muted))}.bg-muted\/20{background-color:hsl(var(--muted) / .2)}.bg-muted\/30{background-color:hsl(var(--muted) / .3)}.bg-muted\/50{background-color:hsl(var(--muted) / .5)}.bg-orange-500{--tw-bg-opacity: 1;background-color:rgb(249 115 22 / var(--tw-bg-opacity, 1))}.bg-orange-500\/20{background-color:#f9731633}.bg-pink-500{--tw-bg-opacity: 1;background-color:rgb(236 72 153 / var(--tw-bg-opacity, 1))}.bg-primary{background-color:hsl(var(--primary))}.bg-primary\/10{background-color:hsl(var(--primary) / .1)}.bg-purple-500{--tw-bg-opacity: 1;background-color:rgb(168 85 247 / var(--tw-bg-opacity, 1))}.bg-purple-500\/10{background-color:#a855f71a}.bg-purple-500\/20{background-color:#a855f733}.bg-red-500{--tw-bg-opacity: 1;background-color:rgb(239 68 68 / var(--tw-bg-opacity, 1))}.bg-red-500\/10{background-color:#ef44441a}.bg-red-500\/20{background-color:#ef444433}.bg-red-500\/5{background-color:#ef44440d}.bg-white{--tw-bg-opacity: 1;background-color:rgb(255 255 255 / var(--tw-bg-opacity, 
1))}.bg-white\/90{background-color:#ffffffe6}.bg-gradient-to-br{background-image:linear-gradient(to bottom right,var(--tw-gradient-stops))}.from-background{--tw-gradient-from: hsl(var(--background)) var(--tw-gradient-from-position);--tw-gradient-to: hsl(var(--background) / 0) var(--tw-gradient-to-position);--tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to)}.from-slate-500\/5{--tw-gradient-from: rgb(100 116 139 / .05) var(--tw-gradient-from-position);--tw-gradient-to: rgb(100 116 139 / 0) var(--tw-gradient-to-position);--tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to)}.to-muted{--tw-gradient-to: hsl(var(--muted)) var(--tw-gradient-to-position)}.to-zinc-500\/10{--tw-gradient-to: rgb(113 113 122 / .1) var(--tw-gradient-to-position)}.object-contain{-o-object-fit:contain;object-fit:contain}.p-1{padding:.25rem}.p-1\.5{padding:.375rem}.p-2{padding:.5rem}.p-2\.5{padding:.625rem}.p-3{padding:.75rem}.p-4{padding:1rem}.p-5{padding:1.25rem}.p-6{padding:1.5rem}.p-8{padding:2rem}.px-1\.5{padding-left:.375rem;padding-right:.375rem}.px-2{padding-left:.5rem;padding-right:.5rem}.px-2\.5{padding-left:.625rem;padding-right:.625rem}.px-3{padding-left:.75rem;padding-right:.75rem}.px-4{padding-left:1rem;padding-right:1rem}.px-5{padding-left:1.25rem;padding-right:1.25rem}.px-6{padding-left:1.5rem;padding-right:1.5rem}.py-0\.5{padding-top:.125rem;padding-bottom:.125rem}.py-1{padding-top:.25rem;padding-bottom:.25rem}.py-1\.5{padding-top:.375rem;padding-bottom:.375rem}.py-12{padding-top:3rem;padding-bottom:3rem}.py-2{padding-top:.5rem;padding-bottom:.5rem}.py-2\.5{padding-top:.625rem;padding-bottom:.625rem}.py-20{padding-top:5rem;padding-bottom:5rem}.py-3{padding-top:.75rem;padding-bottom:.75rem}.py-4{padding-top:1rem;padding-bottom:1rem}.py-8{padding-top:2rem;padding-bottom:2rem}.pl-10{padding-left:2.5rem}.pr-10{padding-right:2.5rem}.pr-4{padding-right:1rem}.pt-1{padding-top:.25rem}.pt-16{padding-top:4rem}.pt-20{padding-top:5rem}.pt-3{padding-top:.75rem}.pt
-4{padding-top:1rem}.pt-6{padding-top:1.5rem}.text-left{text-align:left}.text-center{text-align:center}.text-right{text-align:right}.font-mono{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace}.text-2xl{font-size:1.5rem;line-height:2rem}.text-3xl{font-size:1.875rem;line-height:2.25rem}.text-lg{font-size:1.125rem;line-height:1.75rem}.text-sm{font-size:.875rem;line-height:1.25rem}.text-xs{font-size:.75rem;line-height:1rem}.font-bold{font-weight:700}.font-medium{font-weight:500}.font-normal{font-weight:400}.font-semibold{font-weight:600}.uppercase{text-transform:uppercase}.capitalize{text-transform:capitalize}.italic{font-style:italic}.leading-relaxed{line-height:1.625}.tracking-wide{letter-spacing:.025em}.text-amber-500{--tw-text-opacity: 1;color:rgb(245 158 11 / var(--tw-text-opacity, 1))}.text-amber-600{--tw-text-opacity: 1;color:rgb(217 119 6 / var(--tw-text-opacity, 1))}.text-amber-600\/90{color:#d97706e6}.text-amber-700{--tw-text-opacity: 1;color:rgb(180 83 9 / var(--tw-text-opacity, 1))}.text-blue-500{--tw-text-opacity: 1;color:rgb(59 130 246 / var(--tw-text-opacity, 1))}.text-blue-600{--tw-text-opacity: 1;color:rgb(37 99 235 / var(--tw-text-opacity, 1))}.text-cyan-500{--tw-text-opacity: 1;color:rgb(6 182 212 / var(--tw-text-opacity, 1))}.text-destructive{color:hsl(var(--destructive))}.text-emerald-500{--tw-text-opacity: 1;color:rgb(16 185 129 / var(--tw-text-opacity, 1))}.text-emerald-600{--tw-text-opacity: 1;color:rgb(5 150 105 / var(--tw-text-opacity, 1))}.text-foreground{color:hsl(var(--foreground))}.text-gray-500{--tw-text-opacity: 1;color:rgb(107 114 128 / var(--tw-text-opacity, 1))}.text-gray-600{--tw-text-opacity: 1;color:rgb(75 85 99 / var(--tw-text-opacity, 1))}.text-gray-700{--tw-text-opacity: 1;color:rgb(55 65 81 / var(--tw-text-opacity, 1))}.text-green-500{--tw-text-opacity: 1;color:rgb(34 197 94 / var(--tw-text-opacity, 
1))}.text-muted-foreground{color:hsl(var(--muted-foreground))}.text-muted-foreground\/30{color:hsl(var(--muted-foreground) / .3)}.text-muted-foreground\/50{color:hsl(var(--muted-foreground) / .5)}.text-muted-foreground\/70{color:hsl(var(--muted-foreground) / .7)}.text-orange-500{--tw-text-opacity: 1;color:rgb(249 115 22 / var(--tw-text-opacity, 1))}.text-pink-500{--tw-text-opacity: 1;color:rgb(236 72 153 / var(--tw-text-opacity, 1))}.text-primary{color:hsl(var(--primary))}.text-primary-foreground{color:hsl(var(--primary-foreground))}.text-purple-500{--tw-text-opacity: 1;color:rgb(168 85 247 / var(--tw-text-opacity, 1))}.text-red-500{--tw-text-opacity: 1;color:rgb(239 68 68 / var(--tw-text-opacity, 1))}.text-red-600{--tw-text-opacity: 1;color:rgb(220 38 38 / var(--tw-text-opacity, 1))}.text-white{--tw-text-opacity: 1;color:rgb(255 255 255 / var(--tw-text-opacity, 1))}.opacity-30{opacity:.3}.opacity-50{opacity:.5}.opacity-60{opacity:.6}.shadow-2xl{--tw-shadow: 0 25px 50px -12px rgb(0 0 0 / .25);--tw-shadow-colored: 0 25px 50px -12px var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow, 0 0 #0000),var(--tw-ring-shadow, 0 0 #0000),var(--tw-shadow)}.shadow-lg{--tw-shadow: 0 10px 15px -3px rgb(0 0 0 / .1), 0 4px 6px -4px rgb(0 0 0 / .1);--tw-shadow-colored: 0 10px 15px -3px var(--tw-shadow-color), 0 4px 6px -4px var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow, 0 0 #0000),var(--tw-ring-shadow, 0 0 #0000),var(--tw-shadow)}.shadow-sm{--tw-shadow: 0 1px 2px 0 rgb(0 0 0 / .05);--tw-shadow-colored: 0 1px 2px 0 var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow, 0 0 #0000),var(--tw-ring-shadow, 0 0 #0000),var(--tw-shadow)}.shadow-xl{--tw-shadow: 0 20px 25px -5px rgb(0 0 0 / .1), 0 8px 10px -6px rgb(0 0 0 / .1);--tw-shadow-colored: 0 20px 25px -5px var(--tw-shadow-color), 0 8px 10px -6px var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow, 0 0 #0000),var(--tw-ring-shadow, 0 0 #0000),var(--tw-shadow)}.filter{filter:var(--tw-blur) 
var(--tw-brightness) var(--tw-contrast) var(--tw-grayscale) var(--tw-hue-rotate) var(--tw-invert) var(--tw-saturate) var(--tw-sepia) var(--tw-drop-shadow)}.backdrop-blur-\[1px\]{--tw-backdrop-blur: blur(1px);-webkit-backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia)}.backdrop-blur-sm{--tw-backdrop-blur: blur(4px);-webkit-backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) var(--tw-backdrop-sepia);backdrop-filter:var(--tw-backdrop-blur) var(--tw-backdrop-brightness) var(--tw-backdrop-contrast) var(--tw-backdrop-grayscale) var(--tw-backdrop-hue-rotate) var(--tw-backdrop-invert) var(--tw-backdrop-opacity) var(--tw-backdrop-saturate) 
var(--tw-backdrop-sepia)}.transition{transition-property:color,background-color,border-color,text-decoration-color,fill,stroke,opacity,box-shadow,transform,filter,backdrop-filter;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.transition-all{transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.transition-colors{transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.transition-transform{transition-property:transform;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.duration-200{transition-duration:.2s}.duration-300{transition-duration:.3s}::-webkit-scrollbar{width:8px;height:8px}::-webkit-scrollbar-track{background-color:hsl(var(--muted))}::-webkit-scrollbar-thumb{border-radius:9999px;background-color:hsl(var(--muted-foreground) / .3)}::-webkit-scrollbar-thumb:hover{background-color:hsl(var(--muted-foreground) / .5)}html{transition:color-scheme .2s ease}.leaflet-container{font-family:inherit;border-radius:.5rem}.leaflet-popup-content-wrapper{border-radius:.5rem;box-shadow:0 4px 6px -1px #0000001a,0 2px 4px -2px #0000001a}.leaflet-popup-content{margin:12px 14px}.dark .leaflet-popup-content-wrapper{background:#1e293b;color:#e2e8f0}.dark .leaflet-popup-tip{background:#1e293b}.dark .leaflet-popup-content a{color:#60a5fa}.custom-marker{background:transparent;border:none}.dark .leaflet-control-zoom a{background-color:#1e293b;color:#e2e8f0;border-color:#334155}.dark .leaflet-control-zoom a:hover{background-color:#334155}.dark .leaflet-control-attribution{background:#1e293bcc;color:#94a3b8}.dark .leaflet-control-attribution a{color:#60a5fa}.last\:border-0:last-child{border-width:0px}.hover\:bg-blue-600:hover{--tw-bg-opacity: 1;background-color:rgb(37 99 235 / var(--tw-bg-opacity, 1))}.hover\:bg-cyan-600:hover{--tw-bg-opacity: 1;background-color:rgb(8 145 178 / 
var(--tw-bg-opacity, 1))}.hover\:bg-green-500\/30:hover{background-color:#22c55e4d}.hover\:bg-muted:hover{background-color:hsl(var(--muted))}.hover\:bg-muted\/30:hover{background-color:hsl(var(--muted) / .3)}.hover\:bg-muted\/80:hover{background-color:hsl(var(--muted) / .8)}.hover\:bg-primary\/10:hover{background-color:hsl(var(--primary) / .1)}.hover\:bg-primary\/90:hover{background-color:hsl(var(--primary) / .9)}.hover\:bg-red-500\/10:hover{background-color:#ef44441a}.hover\:bg-red-500\/30:hover{background-color:#ef44444d}.hover\:bg-red-600:hover{--tw-bg-opacity: 1;background-color:rgb(220 38 38 / var(--tw-bg-opacity, 1))}.hover\:text-destructive:hover{color:hsl(var(--destructive))}.hover\:text-foreground:hover{color:hsl(var(--foreground))}.hover\:text-primary:hover{color:hsl(var(--primary))}.hover\:text-primary\/80:hover{color:hsl(var(--primary) / .8)}.hover\:text-red-500:hover{--tw-text-opacity: 1;color:rgb(239 68 68 / var(--tw-text-opacity, 1))}.hover\:underline:hover{text-decoration-line:underline}.focus\:border-primary:focus{border-color:hsl(var(--primary))}.focus\:outline-none:focus{outline:2px solid transparent;outline-offset:2px}.focus\:ring-2:focus{--tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(2px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow, 0 0 #0000)}.focus\:ring-primary:focus{--tw-ring-color: hsl(var(--primary))}.focus\:ring-primary\/50:focus{--tw-ring-color: hsl(var(--primary) / .5)}.focus\:ring-offset-2:focus{--tw-ring-offset-width: 2px}.disabled\:cursor-not-allowed:disabled{cursor:not-allowed}.disabled\:opacity-50:disabled{opacity:.5}.dark\:border-slate-400\/20:is(.dark *){border-color:#94a3b833}.dark\:bg-slate-800\/90:is(.dark *){background-color:#1e293be6}.dark\:from-slate-400\/5:is(.dark *){--tw-gradient-from: rgb(148 163 184 / .05) 
var(--tw-gradient-from-position);--tw-gradient-to: rgb(148 163 184 / 0) var(--tw-gradient-to-position);--tw-gradient-stops: var(--tw-gradient-from), var(--tw-gradient-to)}.dark\:to-zinc-400\/10:is(.dark *){--tw-gradient-to: rgb(161 161 170 / .1) var(--tw-gradient-to-position)}.dark\:text-amber-300:is(.dark *){--tw-text-opacity: 1;color:rgb(252 211 77 / var(--tw-text-opacity, 1))}.dark\:text-amber-400:is(.dark *){--tw-text-opacity: 1;color:rgb(251 191 36 / var(--tw-text-opacity, 1))}.dark\:text-amber-400\/90:is(.dark *){color:#fbbf24e6}.dark\:text-blue-400:is(.dark *){--tw-text-opacity: 1;color:rgb(96 165 250 / var(--tw-text-opacity, 1))}.dark\:text-emerald-400:is(.dark *){--tw-text-opacity: 1;color:rgb(52 211 153 / var(--tw-text-opacity, 1))}.dark\:text-gray-300:is(.dark *){--tw-text-opacity: 1;color:rgb(209 213 219 / var(--tw-text-opacity, 1))}.dark\:text-gray-400:is(.dark *){--tw-text-opacity: 1;color:rgb(156 163 175 / var(--tw-text-opacity, 1))}.dark\:text-red-400:is(.dark *){--tw-text-opacity: 1;color:rgb(248 113 113 / var(--tw-text-opacity, 1))}@media (min-width: 640px){.sm\:block{display:block}.sm\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}}@media (min-width: 768px){.md\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.md\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.md\:grid-cols-4{grid-template-columns:repeat(4,minmax(0,1fr))}}@media (min-width: 1024px){.lg\:col-span-2{grid-column:span 2 / span 2}.lg\:ml-\[300px\]{margin-left:300px}.lg\:block{display:block}.lg\:flex{display:flex}.lg\:hidden{display:none}.lg\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.lg\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.lg\:grid-cols-4{grid-template-columns:repeat(4,minmax(0,1fr))}}
backend/static/assets/index-ffXrP8Fx.js ADDED
The diff for this file is too large to render. See raw diff
 
backend/static/box-logo.png ADDED