Parthnuwal7 committed
Commit: 2a317a3
Parent: e3ab915

Supabase

Files changed:
- app.py (+3, -1)
- database/add_semester_columns.sql (+0, -9)
- database/create_storage_bucket.sql (+28, -0)
- database/db.py (+67, -32)
- database/local_storage.py (+226, -0)
- database/migrate_domain_module.sql (+0, -113)
- database/migrate_scores_table.sql (+0, -71)
- database/migrate_to_text_fields.sql (+0, -17)
- database/schema.sql (+0, -151)
- database/storage_bucket_db.py (+196, -0)
app.py CHANGED

@@ -20,14 +20,16 @@ app.register_blueprint(domain_bp, url_prefix='/api/analytics')

Before:

  # Health check with database status
  @app.route('/health', methods=['GET'])
  def health_check():
-     from database.db import get_db_or_none, get_db_error
      db = get_db_or_none()
      db_status = 'connected' if db is not None else 'disconnected'
      db_error = get_db_error()

      return {
          'status': 'healthy' if db else 'degraded',
          'service': 'analytics-api',
          'database': db_status,
          'db_error': db_error if db_error else None
      }, 200 if db else 503

After:

  # Health check with database status
  @app.route('/health', methods=['GET'])
  def health_check():
+     from database.db import get_db_or_none, get_db_error, get_storage_mode
      db = get_db_or_none()
      db_status = 'connected' if db is not None else 'disconnected'
      db_error = get_db_error()
+     storage_mode = get_storage_mode()

      return {
          'status': 'healthy' if db else 'degraded',
          'service': 'analytics-api',
+         'storage_mode': storage_mode,  # 'supabase' or 'local'
          'database': db_status,
          'db_error': db_error if db_error else None
      }, 200 if db else 503
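With this change the health endpoint also reports which storage backend is active. A minimal check against a running instance might look like the sketch below (the base URL and port are assumptions, not part of the commit):

# Illustrative request against /health; adjust the host/port to your deployment.
import requests

resp = requests.get("http://localhost:5000/health")
print(resp.status_code)                 # 200 when a backend is available, 503 otherwise
print(resp.json().get("storage_mode"))  # 'storage_bucket' or 'local', per database/db.py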
database/add_semester_columns.sql DELETED

@@ -1,9 +0,0 @@

-- Add missing semester columns to analytics_students table
-- Run this in Supabase SQL Editor

ALTER TABLE analytics_students
ADD COLUMN IF NOT EXISTS sgpa_sem1 REAL CHECK (sgpa_sem1 >= 0 AND sgpa_sem1 <= 10),
ADD COLUMN IF NOT EXISTS sgpa_sem2 REAL CHECK (sgpa_sem2 >= 0 AND sgpa_sem2 <= 10),
ADD COLUMN IF NOT EXISTS sgpa_sem3 REAL CHECK (sgpa_sem3 >= 0 AND sgpa_sem3 <= 10),
ADD COLUMN IF NOT EXISTS sgpa_sem7 REAL CHECK (sgpa_sem7 >= 0 AND sgpa_sem7 <= 10),
ADD COLUMN IF NOT EXISTS sgpa_sem8 REAL CHECK (sgpa_sem8 >= 0 AND sgpa_sem8 <= 10);
database/create_storage_bucket.sql ADDED

@@ -0,0 +1,28 @@

-- Create Analytics Storage Bucket
-- Run this in Supabase SQL Editor

-- 1. Create a public storage bucket for analytics data
INSERT INTO storage.buckets (id, name, public)
VALUES ('analytics-data', 'analytics-data', true)
ON CONFLICT (id) DO NOTHING;

-- 2. Allow public read access (for API to fetch data)
CREATE POLICY "Allow public read access on analytics-data"
ON storage.objects FOR SELECT
USING (bucket_id = 'analytics-data');

-- 3. Allow authenticated upsert (for API to write data)
CREATE POLICY "Allow public insert on analytics-data"
ON storage.objects FOR INSERT
WITH CHECK (bucket_id = 'analytics-data');

CREATE POLICY "Allow public update on analytics-data"
ON storage.objects FOR UPDATE
USING (bucket_id = 'analytics-data');

CREATE POLICY "Allow public delete on analytics-data"
ON storage.objects FOR DELETE
USING (bucket_id = 'analytics-data');

-- Verify bucket was created
SELECT * FROM storage.buckets WHERE id = 'analytics-data';
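Once the bucket and policies exist, the backend can round-trip JSON objects through it. A rough sketch with supabase-py, assuming SUPABASE_URL and SUPABASE_KEY come from config or the environment (the file path and payload are illustrative):

# Illustrative round-trip against the 'analytics-data' bucket created above.
import json
from supabase import create_client

supabase = create_client(SUPABASE_URL, SUPABASE_KEY)   # assumed credentials
bucket = supabase.storage.from_("analytics-data")

payload = json.dumps({"S001": {"student_id": "S001", "cgpa": 8.2}}).encode("utf-8")
# upload() raises if the object already exists; the commit's StorageBucketDB
# handles that by trying update() first and falling back to upload().
bucket.upload("analytics_students.json", payload, {"content-type": "application/json"})

raw = bucket.download("analytics_students.json")
print(json.loads(raw.decode("utf-8")))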
database/db.py CHANGED

@@ -1,62 +1,97 @@

Before (removed lines are truncated in the diff view):

- """Database connection
  import os
- from supabase import create_client, Client
  from config import Config

- #
-
  _db_error: str = None

- def
-     """
-     global

      supabase_url = Config.SUPABASE_URL
      supabase_key = Config.SUPABASE_KEY

-     # Log for debugging
      print(f"[DB] SUPABASE_URL: {supabase_url[:30]}..." if supabase_url else "[DB] SUPABASE_URL: NOT SET")
-     print(f"[DB] SUPABASE_KEY: {'*' * 10}...{supabase_key[-10:]}" if supabase_key and len(supabase_key) > 10 else "[DB] SUPABASE_KEY: NOT SET

-     if not supabase_url or
-         _db_error = "
-         print(f"[DB
-         return

          print(f"[DB ERROR] {_db_error}")
-         return

      try:
      except Exception as e:
-         _db_error = f"Failed to
          print(f"[DB ERROR] {_db_error}")
-         return

  # Initialize on module load

  def get_db():
-     """Get
-     global

-     if
-         _init_supabase()

-     if
-         raise RuntimeError(f"

-     return

  def get_db_or_none():
-     """Get
-     return

  def get_db_error():
      """Get database initialization error if any"""
      return _db_error

After:

+ """Database connection with Storage Bucket + Local fallback"""
  import os
  from config import Config

+ # Storage mode: 'storage_bucket', 'local', or None
+ STORAGE_MODE = None
+ _db_client = None
  _db_error: str = None

+ def _init_storage_bucket():
+     """Try to initialize Supabase Storage Bucket DB"""
+     global STORAGE_MODE, _db_client, _db_error

      supabase_url = Config.SUPABASE_URL
      supabase_key = Config.SUPABASE_KEY

+     # Log for debugging
      print(f"[DB] SUPABASE_URL: {supabase_url[:30]}..." if supabase_url else "[DB] SUPABASE_URL: NOT SET")
+     print(f"[DB] SUPABASE_KEY: {'*' * 10}...{supabase_key[-10:]}" if supabase_key and len(supabase_key) > 10 else "[DB] SUPABASE_KEY: NOT SET")

+     if not supabase_url or not supabase_key or supabase_key == '':
+         _db_error = "SUPABASE credentials not set"
+         print(f"[DB] {_db_error} - falling back to local storage")
+         return False

+     try:
+         from supabase import create_client
+         from database.storage_bucket_db import StorageBucketDB
+
+         # Create Supabase client
+         supabase_client = create_client(supabase_url, supabase_key)
+
+         # Wrap with StorageBucketDB to use Storage instead of DB tables
+         _db_client = StorageBucketDB(supabase_client)
+         STORAGE_MODE = 'storage_bucket'
+         print("[DB] Using Supabase STORAGE BUCKET (analytics-data bucket)")
+         return True
+     except Exception as e:
+         _db_error = f"Failed to initialize storage bucket: {str(e)}"
          print(f"[DB ERROR] {_db_error}")
+         return False
+
+
+ def _init_local():
+     """Initialize local file storage as fallback"""
+     global STORAGE_MODE, _db_client, _db_error

      try:
+         from database.local_storage import get_local_storage
+         _db_client = get_local_storage()
+         STORAGE_MODE = 'local'
+         print("[DB] Using LOCAL FILE STORAGE (data saved to /tmp/analytics_data)")
+         return True
      except Exception as e:
+         _db_error = f"Failed to initialize local storage: {str(e)}"
          print(f"[DB ERROR] {_db_error}")
+         return False
+
+
+ def _init_db():
+     """Initialize database - tries Storage Bucket first, falls back to local"""
+     if not _init_storage_bucket():
+         _init_local()

  # Initialize on module load
+ _init_db()

  def get_db():
+     """Get database client instance"""
+     global _db_client, _db_error

+     if _db_client is None:
+         _init_db()

+     if _db_client is None:
+         raise RuntimeError(f"No database available: {_db_error}")

+     return _db_client

  def get_db_or_none():
+     """Get database client or None (for health checks)"""
+     return _db_client

  def get_db_error():
      """Get database initialization error if any"""
      return _db_error
+
+
+ def get_storage_mode():
+     """Get current storage mode: 'storage_bucket', 'local', or None"""
+     return STORAGE_MODE
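Callers elsewhere in the codebase do not need to know which backend was selected, since both wrappers expose the same Supabase-style chain. A hedged usage sketch (the table query is illustrative):

# Illustrative only: route handlers stay backend-agnostic.
from database.db import get_db, get_storage_mode

db = get_db()               # StorageBucketDB if Supabase credentials are set, LocalStorage otherwise
print(get_storage_mode())   # 'storage_bucket' or 'local'

result = db.table('analytics_students').select('*').execute()
print(len(result.data or []), "student records")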
database/local_storage.py ADDED

@@ -0,0 +1,226 @@

"""Local file-based storage fallback for analytics data"""
import json
import os
from datetime import datetime
from typing import Dict, Any, Optional, List
import uuid

# Storage directory - works on HuggingFace Spaces in /tmp
STORAGE_DIR = os.getenv('ANALYTICS_STORAGE_DIR', '/tmp/analytics_data')

class LocalStorage:
    """Simple file-based storage that mimics Supabase responses"""

    def __init__(self):
        self.storage_dir = STORAGE_DIR
        os.makedirs(self.storage_dir, exist_ok=True)
        print(f"[LocalStorage] Initialized at: {self.storage_dir}")

        # Initialize data files
        self.students_file = os.path.join(self.storage_dir, 'students.json')
        self.personality_file = os.path.join(self.storage_dir, 'personality.json')
        self.text_file = os.path.join(self.storage_dir, 'text.json')
        self.domain_file = os.path.join(self.storage_dir, 'domain.json')

        # Load existing data
        self._students = self._load(self.students_file, {})
        self._personality = self._load(self.personality_file, {})
        self._text = self._load(self.text_file, {})
        self._domain = self._load(self.domain_file, {})

    def _load(self, filepath: str, default: Any) -> Any:
        """Load JSON file or return default"""
        try:
            if os.path.exists(filepath):
                with open(filepath, 'r') as f:
                    return json.load(f)
        except Exception as e:
            print(f"[LocalStorage] Error loading {filepath}: {e}")
        return default

    def _save(self, filepath: str, data: Any):
        """Save data to JSON file"""
        try:
            with open(filepath, 'w') as f:
                json.dump(data, f, indent=2, default=str)
        except Exception as e:
            print(f"[LocalStorage] Error saving {filepath}: {e}")

    def table(self, table_name: str) -> 'LocalTable':
        """Return a table-like interface"""
        return LocalTable(self, table_name)

    def get_data(self, table_name: str) -> Dict:
        """Get raw data for a table"""
        mapping = {
            'analytics_students': (self._students, self.students_file),
            'analytics_personality_responses': (self._personality, self.personality_file),
            'analytics_text_responses': (self._text, self.text_file),
            'analytics_domain_evidence': (self._domain, self.domain_file),
        }
        return mapping.get(table_name, ({}, None))

    def set_data(self, table_name: str, key: str, value: Dict):
        """Set data for a table"""
        data, filepath = self.get_data(table_name)
        if data is not None:
            data[key] = value
            self._save(filepath, data)


class LocalTable:
    """Mimics Supabase table interface for local storage"""

    def __init__(self, storage: LocalStorage, table_name: str):
        self.storage = storage
        self.table_name = table_name
        self._query = {}
        self._select_cols = '*'

    def select(self, columns: str = '*') -> 'LocalTable':
        self._select_cols = columns
        return self

    def eq(self, column: str, value: Any) -> 'LocalTable':
        self._query[column] = value
        return self

    def maybe_single(self) -> 'LocalTable':
        self._single = True
        return self

    def single(self) -> 'LocalTable':
        self._single = True
        return self

    def execute(self) -> 'LocalResult':
        """Execute the query or write operation"""
        # Check if this is a write operation
        if hasattr(self, '_insert_data'):
            return self._do_insert()
        if hasattr(self, '_upsert_data'):
            return self._do_upsert()
        if hasattr(self, '_update_data'):
            return self._do_update()

        # Otherwise it's a read/select operation
        data, _ = self.storage.get_data(self.table_name)

        if self._query:
            # Filter by query
            results = []
            for key, record in data.items():
                match = all(record.get(k) == v for k, v in self._query.items())
                if match:
                    results.append(record)

            if getattr(self, '_single', False) and results:
                return LocalResult(results[0])
            elif getattr(self, '_single', False):
                return LocalResult(None)
            return LocalResult(results)
        else:
            # Return all
            return LocalResult(list(data.values()))

    def insert(self, record: Dict) -> 'LocalTable':
        """Insert a record"""
        self._insert_data = record
        return self

    def upsert(self, record: Dict) -> 'LocalTable':
        """Upsert a record"""
        self._upsert_data = record
        return self

    def update(self, record: Dict) -> 'LocalTable':
        """Update records"""
        self._update_data = record
        return self

    def _do_insert(self) -> 'LocalResult':
        """Perform insert"""
        record = getattr(self, '_insert_data', {})
        data, filepath = self.storage.get_data(self.table_name)

        # Generate ID if needed
        if 'id' not in record:
            record['id'] = str(uuid.uuid4())

        # Use student_id as key if present
        key = record.get('student_id', record.get('id'))
        record['created_at'] = datetime.utcnow().isoformat()

        data[key] = record
        self.storage._save(filepath, data)

        print(f"[LocalStorage] Inserted into {self.table_name}: {key}")
        return LocalResult([record])

    def _do_upsert(self) -> 'LocalResult':
        """Perform upsert"""
        record = getattr(self, '_upsert_data', {})
        data, filepath = self.storage.get_data(self.table_name)

        # Use student_id as key
        key = record.get('student_id', str(uuid.uuid4()))

        # Merge with existing if present
        if key in data:
            existing = data[key]
            existing.update(record)
            existing['updated_at'] = datetime.utcnow().isoformat()
            record = existing
        else:
            record['id'] = str(uuid.uuid4())
            record['created_at'] = datetime.utcnow().isoformat()

        data[key] = record
        self.storage._save(filepath, data)

        print(f"[LocalStorage] Upserted into {self.table_name}: {key}")
        return LocalResult([record])

    def _do_update(self) -> 'LocalResult':
        """Perform update on matching records"""
        updates = getattr(self, '_update_data', {})
        data, filepath = self.storage.get_data(self.table_name)
        updated = []

        for key, record in data.items():
            match = all(record.get(k) == v for k, v in self._query.items())
            if match:
                record.update(updates)
                record['updated_at'] = datetime.utcnow().isoformat()
                updated.append(record)

        self.storage._save(filepath, data)
        print(f"[LocalStorage] Updated {len(updated)} records in {self.table_name}")
        return LocalResult(updated)


class LocalResult:
    """Mimics Supabase result object"""

    def __init__(self, data: Any):
        if data is None:
            self.data = None
        elif isinstance(data, list):
            self.data = data
        else:
            self.data = data

    def execute(self) -> 'LocalResult':
        """For chained calls that end with execute()"""
        return self


# Singleton instance
_local_storage: LocalStorage = None

def get_local_storage() -> LocalStorage:
    """Get or create local storage instance"""
    global _local_storage
    if _local_storage is None:
        _local_storage = LocalStorage()
    return _local_storage
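The fallback keeps the same chained API the rest of the code expects from Supabase. A short usage sketch exercising exactly the methods defined above (the student_id and cgpa values are illustrative):

# Hypothetical usage, not part of the commit: Supabase-style chain on local JSON files.
from database.local_storage import get_local_storage

db = get_local_storage()

# Upsert a record keyed by student_id (written to /tmp/analytics_data/students.json)
db.table('analytics_students').upsert({'student_id': 'S001', 'cgpa': 8.2}).execute()

# Read it back with an eq() filter
res = db.table('analytics_students').select('*').eq('student_id', 'S001').execute()
print(res.data)  # list of matching records, each with generated 'id' and 'created_at'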
database/migrate_domain_module.sql DELETED

@@ -1,113 +0,0 @@

-- Domain Module Migration Script
-- Run this in Supabase SQL Editor to add domain-specific scoring support
-- Date: December 9, 2025

-- ============================================================================
-- STEP 1: Add domain fields to existing analytics_students table
-- ============================================================================

ALTER TABLE analytics_students
ADD COLUMN IF NOT EXISTS active_domain TEXT CHECK (active_domain IN ('tech', 'business', 'creative', 'research', NULL)),
ADD COLUMN IF NOT EXISTS domain_score REAL CHECK (domain_score >= 0 AND domain_score <= 1),
ADD COLUMN IF NOT EXISTS domain_confidence REAL CHECK (domain_confidence >= 0 AND domain_confidence <= 1);

-- ============================================================================
-- STEP 2: Create domain evidence table
-- ============================================================================

CREATE TABLE IF NOT EXISTS analytics_domain_evidence (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    student_id TEXT REFERENCES analytics_students(student_id) ON DELETE CASCADE,
    domain_type TEXT NOT NULL CHECK (domain_type IN ('tech', 'business', 'creative', 'research')),
    evidence_data JSONB NOT NULL,
    domain_score REAL CHECK (domain_score >= 0 AND domain_score <= 1),
    domain_confidence REAL CHECK (domain_confidence >= 0 AND domain_confidence <= 1),
    raw_features JSONB,
    processing_status TEXT DEFAULT 'pending' CHECK (processing_status IN ('pending', 'processing', 'completed', 'failed')),
    error_message TEXT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    UNIQUE(student_id, domain_type)
);

-- ============================================================================
-- STEP 3: Create indexes for performance
-- ============================================================================

CREATE INDEX IF NOT EXISTS idx_domain_evidence_student ON analytics_domain_evidence(student_id);
CREATE INDEX IF NOT EXISTS idx_domain_evidence_type ON analytics_domain_evidence(domain_type);
CREATE INDEX IF NOT EXISTS idx_domain_evidence_status ON analytics_domain_evidence(processing_status);

-- ============================================================================
-- STEP 4: Enable Row Level Security
-- ============================================================================

ALTER TABLE analytics_domain_evidence ENABLE ROW LEVEL SECURITY;

-- ============================================================================
-- STEP 5: Create RLS Policies for domain evidence
-- ============================================================================

-- Users can view their own domain evidence
CREATE POLICY "Users can view own domain evidence"
ON analytics_domain_evidence FOR SELECT
TO authenticated
USING (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));

-- Users can insert their own domain evidence
CREATE POLICY "Users can insert own domain evidence"
ON analytics_domain_evidence FOR INSERT
TO authenticated
WITH CHECK (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));

-- Users can update their own domain evidence
CREATE POLICY "Users can update own domain evidence"
ON analytics_domain_evidence FOR UPDATE
TO authenticated
USING (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));

-- Users can delete their own domain evidence
CREATE POLICY "Users can delete own domain evidence"
ON analytics_domain_evidence FOR DELETE
TO authenticated
USING (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));

-- ============================================================================
-- STEP 6: Verification queries (run these to verify successful migration)
-- ============================================================================

-- Check if columns were added
SELECT column_name, data_type
FROM information_schema.columns
WHERE table_name = 'analytics_students'
AND column_name IN ('active_domain', 'domain_score', 'domain_confidence');

-- Check if table was created
SELECT table_name
FROM information_schema.tables
WHERE table_name = 'analytics_domain_evidence';

-- Check if indexes were created
SELECT indexname
FROM pg_indexes
WHERE tablename = 'analytics_domain_evidence';

-- Check if RLS policies were created
SELECT policyname
FROM pg_policies
WHERE tablename = 'analytics_domain_evidence';

-- ============================================================================
-- Migration Complete!
-- ============================================================================

-- Expected results:
-- ✓ 3 new columns in analytics_students table
-- ✓ 1 new table: analytics_domain_evidence
-- ✓ 3 new indexes
-- ✓ 4 new RLS policies

-- Next steps:
-- 1. Restart your Flask backend: python app.py
-- 2. Test domain submission via API or frontend form
-- 3. Verify score fusion includes domain component
database/migrate_scores_table.sql DELETED

@@ -1,71 +0,0 @@

-- Analytics Scores Table - Store computed reports
-- Run this in Supabase SQL Editor

-- Create table for storing computed analytics scores/reports
CREATE TABLE IF NOT EXISTS analytics_scores (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    student_id TEXT REFERENCES analytics_students(student_id) ON DELETE CASCADE,

    -- Overall scores
    final_score REAL NOT NULL CHECK (final_score >= 0 AND final_score <= 100),
    tier TEXT CHECK (tier IN ('Excellent', 'Good', 'Average', 'Needs Work')),

    -- Component scores
    universal_score REAL CHECK (universal_score >= 0 AND universal_score <= 100),
    personality_score REAL CHECK (personality_score >= 0 AND personality_score <= 100),
    text_score REAL CHECK (text_score >= 0 AND text_score <= 100),

    -- Fidelity and domain
    fidelity_score REAL CHECK (fidelity_score >= 0 AND fidelity_score <= 1),
    detected_domain TEXT,
    domain_confidence REAL CHECK (domain_confidence >= 0 AND domain_confidence <= 1),

    -- Full report JSON (includes strengths, improvements, career suggestions, etc.)
    full_report JSONB NOT NULL,

    -- Timestamps
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),

    -- Only one score record per student (can be updated)
    UNIQUE(student_id)
);

-- Index for faster lookups
CREATE INDEX IF NOT EXISTS idx_analytics_scores_student ON analytics_scores(student_id);
CREATE INDEX IF NOT EXISTS idx_analytics_scores_tier ON analytics_scores(tier);
CREATE INDEX IF NOT EXISTS idx_analytics_scores_domain ON analytics_scores(detected_domain);

-- RLS Policies
ALTER TABLE analytics_scores ENABLE ROW LEVEL SECURITY;

-- Users can view their own scores
CREATE POLICY "Users can view own scores"
ON analytics_scores FOR SELECT
TO authenticated
USING (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));

-- Service role can insert/update (for backend processing)
CREATE POLICY "Service can insert scores"
ON analytics_scores FOR INSERT
TO service_role
WITH CHECK (true);

CREATE POLICY "Service can update scores"
ON analytics_scores FOR UPDATE
TO service_role
USING (true);

-- Function to auto-update updated_at timestamp
CREATE OR REPLACE FUNCTION update_analytics_scores_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trigger_analytics_scores_updated
BEFORE UPDATE ON analytics_scores
FOR EACH ROW
EXECUTE FUNCTION update_analytics_scores_timestamp();
database/migrate_to_text_fields.sql DELETED

@@ -1,17 +0,0 @@

-- Migration: Replace numeric fields with text fields for extracurricular, certifications, and internships
-- Run this in Supabase SQL Editor

-- Add new text columns
ALTER TABLE analytics_students
ADD COLUMN IF NOT EXISTS extracurricular_text TEXT,
ADD COLUMN IF NOT EXISTS certifications_text TEXT,
ADD COLUMN IF NOT EXISTS internship_text TEXT;

-- Optional: Drop old numeric columns if you want to clean up
-- Uncomment these lines after verifying the new text fields work
-- ALTER TABLE analytics_students DROP COLUMN IF EXISTS extracurricular_count;
-- ALTER TABLE analytics_students DROP COLUMN IF EXISTS certifications_count;
-- ALTER TABLE analytics_students DROP COLUMN IF EXISTS internship_total_months;

-- Note: If you want to keep both old and new columns during transition,
-- you can skip dropping the old columns and they will coexist.
database/schema.sql DELETED

@@ -1,151 +0,0 @@

-- Analytics Module Schema for Supabase
-- Run this in Supabase SQL Editor

-- 1. Students Table
CREATE TABLE IF NOT EXISTS analytics_students (
    student_id TEXT PRIMARY KEY,
    user_id UUID REFERENCES auth.users(id) ON DELETE CASCADE,
    cgpa REAL NOT NULL CHECK (cgpa >= 0 AND cgpa <= 10),
    sgpa_sem1 REAL CHECK (sgpa_sem1 >= 0 AND sgpa_sem1 <= 10),
    sgpa_sem2 REAL CHECK (sgpa_sem2 >= 0 AND sgpa_sem2 <= 10),
    sgpa_sem3 REAL CHECK (sgpa_sem3 >= 0 AND sgpa_sem3 <= 10),
    sgpa_sem4 REAL CHECK (sgpa_sem4 >= 0 AND sgpa_sem4 <= 10),
    sgpa_sem5 REAL CHECK (sgpa_sem5 >= 0 AND sgpa_sem5 <= 10),
    sgpa_sem6 REAL CHECK (sgpa_sem6 >= 0 AND sgpa_sem6 <= 10),
    sgpa_sem7 REAL CHECK (sgpa_sem7 >= 0 AND sgpa_sem7 <= 10),
    sgpa_sem8 REAL CHECK (sgpa_sem8 >= 0 AND sgpa_sem8 <= 10),
    tenth_pct REAL CHECK (tenth_pct >= 0 AND tenth_pct <= 100),
    twelfth_pct REAL CHECK (twelfth_pct >= 0 AND twelfth_pct <= 100),
    extracurricular_text TEXT,
    certifications_text TEXT,
    internship_text TEXT,
    active_domain TEXT CHECK (active_domain IN ('tech', 'business', 'creative', 'research', NULL)),
    domain_score REAL CHECK (domain_score >= 0 AND domain_score <= 1),
    domain_confidence REAL CHECK (domain_confidence >= 0 AND domain_confidence <= 1),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW()
);

-- 2. Personality Responses Table
CREATE TABLE IF NOT EXISTS analytics_personality_responses (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    student_id TEXT REFERENCES analytics_students(student_id) ON DELETE CASCADE,
    p_q1 INTEGER CHECK (p_q1 >= 1 AND p_q1 <= 5),
    p_q2 INTEGER CHECK (p_q2 >= 1 AND p_q2 <= 5),
    p_q3 INTEGER CHECK (p_q3 >= 1 AND p_q3 <= 5),
    p_q4 INTEGER CHECK (p_q4 >= 1 AND p_q4 <= 5),
    p_q5 INTEGER CHECK (p_q5 >= 1 AND p_q5 <= 5),
    p_q6 INTEGER CHECK (p_q6 >= 1 AND p_q6 <= 5),
    p_q7 INTEGER CHECK (p_q7 >= 1 AND p_q7 <= 5),
    p_q8 INTEGER CHECK (p_q8 >= 1 AND p_q8 <= 5),
    p_q9 INTEGER CHECK (p_q9 >= 1 AND p_q9 <= 5),
    p_q10 INTEGER CHECK (p_q10 >= 1 AND p_q10 <= 5),
    p_q11 INTEGER CHECK (p_q11 >= 1 AND p_q11 <= 5),
    p_q12 INTEGER CHECK (p_q12 >= 1 AND p_q12 <= 5),
    p_q13 INTEGER CHECK (p_q13 >= 1 AND p_q13 <= 5),
    p_q14 INTEGER CHECK (p_q14 >= 1 AND p_q14 <= 5),
    p_q15 INTEGER CHECK (p_q15 >= 1 AND p_q15 <= 5),
    p_q16 INTEGER CHECK (p_q16 >= 1 AND p_q16 <= 5),
    p_q17 INTEGER CHECK (p_q17 >= 1 AND p_q17 <= 5),
    p_q18 INTEGER CHECK (p_q18 >= 1 AND p_q18 <= 5),
    p_q19 INTEGER CHECK (p_q19 >= 1 AND p_q19 <= 5),
    p_q20 INTEGER CHECK (p_q20 >= 1 AND p_q20 <= 5),
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    UNIQUE(student_id)
);

-- 3. Text Responses Table
CREATE TABLE IF NOT EXISTS analytics_text_responses (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    student_id TEXT REFERENCES analytics_students(student_id) ON DELETE CASCADE,
    text_q1 TEXT NOT NULL, -- Strengths
    text_q2 TEXT NOT NULL, -- Career interests
    text_q3 TEXT NOT NULL, -- Extracurriculars + leadership
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    UNIQUE(student_id)
);

-- 4. Domain Evidence Table
CREATE TABLE IF NOT EXISTS analytics_domain_evidence (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    student_id TEXT REFERENCES analytics_students(student_id) ON DELETE CASCADE,
    domain_type TEXT NOT NULL CHECK (domain_type IN ('tech', 'business', 'creative', 'research')),
    evidence_data JSONB NOT NULL, -- Flexible storage for domain-specific inputs
    domain_score REAL CHECK (domain_score >= 0 AND domain_score <= 1),
    domain_confidence REAL CHECK (domain_confidence >= 0 AND domain_confidence <= 1),
    raw_features JSONB, -- Raw feature values for explainability
    processing_status TEXT DEFAULT 'pending' CHECK (processing_status IN ('pending', 'processing', 'completed', 'failed')),
    error_message TEXT,
    created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
    UNIQUE(student_id, domain_type)
);

-- Indexes
CREATE INDEX IF NOT EXISTS idx_analytics_students_user_id ON analytics_students(user_id);
CREATE INDEX IF NOT EXISTS idx_personality_student ON analytics_personality_responses(student_id);
CREATE INDEX IF NOT EXISTS idx_text_student ON analytics_text_responses(student_id);
CREATE INDEX IF NOT EXISTS idx_domain_evidence_student ON analytics_domain_evidence(student_id);
CREATE INDEX IF NOT EXISTS idx_domain_evidence_type ON analytics_domain_evidence(domain_type);
CREATE INDEX IF NOT EXISTS idx_domain_evidence_status ON analytics_domain_evidence(processing_status);

-- RLS Policies
ALTER TABLE analytics_students ENABLE ROW LEVEL SECURITY;
ALTER TABLE analytics_personality_responses ENABLE ROW LEVEL SECURITY;
ALTER TABLE analytics_text_responses ENABLE ROW LEVEL SECURITY;

-- Students can view/update their own data
CREATE POLICY "Users can view own analytics data"
ON analytics_students FOR SELECT
TO authenticated
USING (user_id = auth.uid());

CREATE POLICY "Users can insert own analytics data"
ON analytics_students FOR INSERT
TO authenticated
WITH CHECK (user_id = auth.uid());

CREATE POLICY "Users can update own analytics data"
ON analytics_students FOR UPDATE
TO authenticated
USING (user_id = auth.uid());

-- Personality responses
CREATE POLICY "Users can view own personality responses"
ON analytics_personality_responses FOR SELECT
TO authenticated
USING (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));

CREATE POLICY "Users can insert own personality responses"
ON analytics_personality_responses FOR INSERT
-- Text responses
CREATE POLICY "Users can view own text responses"
ON analytics_text_responses FOR SELECT
TO authenticated
USING (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));

CREATE POLICY "Users can insert own text responses"
ON analytics_text_responses FOR INSERT
TO authenticated
WITH CHECK (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));

-- Domain evidence
ALTER TABLE analytics_domain_evidence ENABLE ROW LEVEL SECURITY;

CREATE POLICY "Users can view own domain evidence"
ON analytics_domain_evidence FOR SELECT
TO authenticated
USING (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));

CREATE POLICY "Users can insert own domain evidence"
ON analytics_domain_evidence FOR INSERT
TO authenticated
WITH CHECK (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));

CREATE POLICY "Users can update own domain evidence"
ON analytics_domain_evidence FOR UPDATE
TO authenticated
USING (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));
ON analytics_text_responses FOR INSERT
TO authenticated
WITH CHECK (student_id IN (SELECT student_id FROM analytics_students WHERE user_id = auth.uid()));
database/storage_bucket_db.py ADDED

@@ -0,0 +1,196 @@

"""Supabase Storage-based storage for analytics data"""
import json
import os
from datetime import datetime
from typing import Dict, Any, Optional, List
import uuid

class StorageBucketDB:
    """Uses Supabase Storage bucket to store analytics JSON files"""

    BUCKET_NAME = 'analytics-data'

    def __init__(self, supabase_client):
        self.supabase = supabase_client
        self.storage = supabase_client.storage
        print(f"[StorageBucketDB] Initialized with bucket: {self.BUCKET_NAME}")

        # In-memory cache for faster reads
        self._cache = {}

    def _get_path(self, table_name: str) -> str:
        """Get storage path for a table"""
        return f"{table_name}.json"

    def _load_table(self, table_name: str) -> Dict:
        """Load table data from storage"""
        if table_name in self._cache:
            return self._cache[table_name]

        try:
            path = self._get_path(table_name)
            response = self.storage.from_(self.BUCKET_NAME).download(path)
            if response:
                data = json.loads(response.decode('utf-8'))
                self._cache[table_name] = data
                print(f"[StorageBucketDB] Loaded {table_name}: {len(data)} records")
                return data
        except Exception as e:
            print(f"[StorageBucketDB] Table {table_name} not found or empty: {e}")

        self._cache[table_name] = {}
        return {}

    def _save_table(self, table_name: str, data: Dict):
        """Save table data to storage"""
        try:
            path = self._get_path(table_name)
            content = json.dumps(data, indent=2, default=str).encode('utf-8')

            # Try to update first, then upload if not exists
            try:
                self.storage.from_(self.BUCKET_NAME).update(path, content, {
                    'content-type': 'application/json'
                })
            except:
                self.storage.from_(self.BUCKET_NAME).upload(path, content, {
                    'content-type': 'application/json'
                })

            self._cache[table_name] = data
            print(f"[StorageBucketDB] Saved {table_name}: {len(data)} records")
        except Exception as e:
            print(f"[StorageBucketDB] Error saving {table_name}: {e}")
            raise

    def table(self, table_name: str) -> 'StorageTable':
        """Return a table-like interface"""
        return StorageTable(self, table_name)


class StorageTable:
    """Mimics Supabase table interface using storage bucket"""

    def __init__(self, db: StorageBucketDB, table_name: str):
        self.db = db
        self.table_name = table_name
        self._query = {}
        self._single = False

    def select(self, columns: str = '*') -> 'StorageTable':
        return self

    def eq(self, column: str, value: Any) -> 'StorageTable':
        self._query[column] = value
        return self

    def maybe_single(self) -> 'StorageTable':
        self._single = True
        return self

    def single(self) -> 'StorageTable':
        self._single = True
        return self

    def insert(self, record: Dict) -> 'StorageTable':
        self._insert_data = record
        return self

    def upsert(self, record: Dict) -> 'StorageTable':
        self._upsert_data = record
        return self

    def update(self, record: Dict) -> 'StorageTable':
        self._update_data = record
        return self

    def execute(self) -> 'StorageResult':
        """Execute the query or write operation"""
        # Handle write operations
        if hasattr(self, '_insert_data'):
            return self._do_insert()
        if hasattr(self, '_upsert_data'):
            return self._do_upsert()
        if hasattr(self, '_update_data'):
            return self._do_update()

        # Handle read operation
        data = self.db._load_table(self.table_name)

        if self._query:
            results = []
            for key, record in data.items():
                match = all(record.get(k) == v for k, v in self._query.items())
                if match:
                    results.append(record)

            if self._single and results:
                return StorageResult(results[0])
            elif self._single:
                return StorageResult(None)
            return StorageResult(results)
        else:
            return StorageResult(list(data.values()))

    def _do_insert(self) -> 'StorageResult':
        record = self._insert_data
        data = self.db._load_table(self.table_name)

        if 'id' not in record:
            record['id'] = str(uuid.uuid4())

        key = record.get('student_id', record.get('id'))
        record['created_at'] = datetime.utcnow().isoformat()

        data[key] = record
        self.db._save_table(self.table_name, data)

        print(f"[StorageBucketDB] Inserted into {self.table_name}: {key}")
        return StorageResult([record])

    def _do_upsert(self) -> 'StorageResult':
        record = self._upsert_data
        data = self.db._load_table(self.table_name)

        key = record.get('student_id', str(uuid.uuid4()))

        if key in data:
            existing = data[key]
            existing.update(record)
            existing['updated_at'] = datetime.utcnow().isoformat()
            record = existing
        else:
            record['id'] = str(uuid.uuid4())
            record['created_at'] = datetime.utcnow().isoformat()

        data[key] = record
        self.db._save_table(self.table_name, data)

        print(f"[StorageBucketDB] Upserted into {self.table_name}: {key}")
        return StorageResult([record])

    def _do_update(self) -> 'StorageResult':
        updates = self._update_data
        data = self.db._load_table(self.table_name)
        updated = []

        for key, record in data.items():
            match = all(record.get(k) == v for k, v in self._query.items())
            if match:
                record.update(updates)
                record['updated_at'] = datetime.utcnow().isoformat()
                updated.append(record)

        self.db._save_table(self.table_name, data)
        print(f"[StorageBucketDB] Updated {len(updated)} records in {self.table_name}")
        return StorageResult(updated)


class StorageResult:
    """Mimics Supabase result object"""

    def __init__(self, data: Any):
        self.data = data

    def execute(self) -> 'StorageResult':
        return self