Spaces:
Sleeping
Sleeping
| #!/usr/bin/env python3 | |
| # -*- coding: utf-8 -*- | |
| """ | |
| Veureu Compliance Service | |
| Microservicio dedicado a cumplimiento normativo con resiliencia: | |
| - Google OAuth authentication | |
| - AWS QLDB consent recording | |
| - Polygon blockchain publishing | |
| - Email notifications | |
| - Digest verification | |
| - Compliance dashboard API | |
| - Resilience & Recovery management | |
| Este servicio maneja desconexiones de Hugging Face gracefully, | |
| asegurando persistencia de datos en AWS QLDB y continuidad del servicio. | |
| """ | |
| import os | |
| import json | |
| import hashlib | |
| import time | |
| from datetime import datetime, timezone | |
| from typing import Dict, Any, Optional, List | |
| from dataclasses import dataclass | |
| import logging | |
| import streamlit as st | |
| import requests | |
| from fastapi import FastAPI, HTTPException | |
| from fastapi.middleware.cors import CORSMiddleware | |
| from pydantic import BaseModel | |
| import uvicorn | |
| # Importaciones del sistema de resiliencia | |
| from config import config, get_service_status | |
| from phone_verification import router as phone_verification_router | |
| from resilience_manager import resilience_manager | |
| from recovery_manager import recovery_manager | |
# Logging configuration: level is taken from the service config object.
logging.basicConfig(
    level=getattr(logging, config.LOG_LEVEL),
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
# Module-level logger shared by every handler in this file.
logger = logging.getLogger(__name__)
# --- FASTAPI CONFIGURATION ---
app = FastAPI(
    title="Veureu Compliance Service",
    version="1.0.0",
    description="Microservicio de cumplimiento normativo con resiliencia"
)
# CORS to allow calls from the demo Space (and other web origins).
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production this can be restricted to the demo Space domain
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Router handling phone / WhatsApp verification endpoints.
app.include_router(phone_verification_router)
| # --- INICIALIZACIÓN CON RECUPERACIÓN --- | |
def initialize_service():
    """Initialize the service with state recovery (non-blocking).

    Only performs a lightweight health probe and logs the result; automatic
    recovery is intentionally NOT executed here (see /api/recovery/status).

    Returns:
        Dict with an "initialized" marker and the current service health.
    """
    logger.info("=== INICIANDO VEUREU COMPLIANCE SERVICE ===")
    logger.info(f"Environment: {config.ENVIRONMENT}")
    logger.info(f"Log Level: {config.LOG_LEVEL}")
    # Simple probe only — no recovery actions are triggered at startup.
    health = resilience_manager.get_service_health()
    logger.info(f"Estado del servicio: {health['status']}")
    logger.info(f"Uptime: {health['uptime_human']}")
    logger.info("Recuperación automática desactivada (usar API /api/recovery/status para detalles)")
    return {"status": "initialized", "service_health": health}
# Run the lightweight initialization once at import time (non-blocking).
startup_recovery = initialize_service()
| # --- MODELOS DE DATOS --- | |
class UserValidation(BaseModel):
    """Request body carrying an auth token to validate."""
    token: str
class LoginURLRequest(BaseModel):
    """Request body asking for an OAuth login URL."""
    callback_url: str  # URL the OAuth flow should return to
class ConsentRecord(BaseModel):
    """Consent record to be persisted (user + video + consent details)."""
    user_info: Dict[str, Any]
    video_info: Dict[str, Any]
    consent_data: Dict[str, Any]
class ValidationRequest(BaseModel):
    """Payload describing a video-consent validation request sent to validators."""
    document_id: str
    user_email: str
    user_name: str
    video_title: str
    video_hash: str
    timestamp: str
    video_url: str
    consent_data: Dict[str, Any]
class ValidatorDecision(BaseModel):
    """A validator's verdict on a pending document."""
    document_id: str
    validator_email: str
    decision: str  # "approved" or "rejected"
    comments: str = ""
class DigestPublish(BaseModel):
    """Request to publish a monthly digest on-chain."""
    period: str  # YYYY-MM
    digest_hash: str | None = None  # pre-computed digest root; simulated data is used when absent
class DigestVerify(BaseModel):
    """Request to verify a published digest against an expected hash."""
    period: str
    expected_hash: str
class EventsDigestPublish(BaseModel):
    """Request to publish a session-events digest (hash pre-computed by caller)."""
    session_id: str
    digest_hash: str
class ActionsQLDBPayload(BaseModel):
    """Change set from actions.db to be persisted in AWS QLDB."""
    session_id: str
    actions: List[Dict[str, Any]]
class LogoutRequest(BaseModel):
    """Request body invalidating the session identified by this token."""
    token: str
class VideoUploadNotification(BaseModel):
    """Notification that a new video was uploaded and needs validation."""
    video_name: str
    sha1sum: str
class LoginSMSRequest(BaseModel):
    """Request to send a login-verification SMS carrying the given code."""
    phone: str
    code: str
| # --- AUTENTICACIÓN (SIMULADA) --- | |
def validate_google_token(token: str) -> Optional[Dict[str, Any]]:
    """Validate a Google OAuth token (simulated until the real integration).

    Args:
        token: JWT token from Google, or a "sim_token_<user>_sim" test token.

    Returns:
        Dict with the user's info (email, name, picture, verified), or
        None when the token is not recognized.
    """
    # Temporary simulation: "sim_token_<user>_sim" maps to "<user>@example.com".
    if token.startswith("sim_token_"):
        email = token.replace("sim_token_", "").replace("_sim", "@example.com")
        return {
            "email": email,
            "name": email.split("@")[0],
            "picture": "https://via.placeholder.com/150",
            "verified": True
        }
    # In production, verify the token with google-auth instead, e.g.:
    #
    #   from google.oauth2 import id_token
    #   from google.auth.transport import requests as google_requests
    #   idinfo = id_token.verify_oauth2_token(
    #       token, google_requests.Request(), GOOGLE_CLIENT_ID
    #   )
    #   return {
    #       "email": idinfo["email"],
    #       "name": idinfo["name"],
    #       "picture": idinfo["picture"],
    #       "verified": idinfo.get("email_verified", False),
    #   }
    #
    # (This template previously lived in a no-op triple-quoted string that was
    # evaluated and discarded at runtime; it is now a real comment.)
    return None
def generate_oauth_login_url(callback_url: str) -> str:
    """Generate the Google OAuth login URL.

    Args:
        callback_url: Return URL after authentication.

    Returns:
        Full URL for the OAuth redirect (simulated: returns the
        percent-encoded callback with a fixed fake token appended).
    """
    # requests.utils.quote is just a re-export of urllib.parse.quote, so use
    # the stdlib directly and drop the third-party indirection.
    from urllib.parse import quote

    # NOTE(review): quoting the whole callback URL yields "https%3A//...",
    # which is not a valid redirect target; presumably intentional for the
    # simulation — confirm before wiring real redirects.
    callback_encoded = quote(callback_url)
    simulated_token = "sim_token_user123_sim"
    return f"{callback_encoded}?auth_token={simulated_token}"
    # In production, build the real OAuth URL instead, e.g.:
    #   client_id = GOOGLE_CLIENT_ID
    #   redirect_uri = quote(callback_url)
    #   scope = "email profile"
    #   return ("https://accounts.google.com/oauth/authorize"
    #           f"?client_id={client_id}&redirect_uri={redirect_uri}"
    #           f"&response_type=code&scope={scope}")
| # --- AWS QLDB INTEGRATION (SIMULADA) --- | |
def record_consent_qldb(user_info: Dict[str, Any],
                        video_info: Dict[str, Any],
                        consent_data: Dict[str, Any]) -> Optional[str]:
    """Record a user consent in AWS QLDB (simulated).

    Args:
        user_info: User information (reads the "email" key for logging).
        video_info: Video information (reads the "name" key for logging).
        consent_data: Consent details (unused by the simulation).

    Returns:
        Generated document id, or None on error.
    """
    # Temporary simulation: fabricate a document id from the current timestamp
    # plus a short content fingerprint (md5 is used only as a cheap hash here,
    # not for security).
    timestamp = int(time.time())
    content_hash = hashlib.md5(json.dumps({
        'user': user_info,
        'video': video_info,
        'timestamp': timestamp
    }).encode()).hexdigest()[:8]
    document_id = f"doc_{timestamp}_{content_hash}"
    logger.info(f"[QLDB - SIMULATED] Consentimiento registrado: {document_id}")
    logger.info(f"[QLDB - SIMULATED] Usuario: {user_info.get('email', 'unknown')}")
    logger.info(f"[QLDB - SIMULATED] Vídeo: {video_info.get('name', 'unknown')}")
    return document_id
    # In production, write the record through boto3's QLDB session client.
    # (The template below used to be an unreachable triple-quoted string
    # after the return; it is preserved as a comment for reference.)
    #
    #   import boto3
    #   from amazon.ion.simpleion import dumps
    #   client = boto3.client('qldb-session')
    #   document = {
    #       'user_email': user_info['email'],
    #       'user_name': user_info['name'],
    #       'video_name': video_info['name'],
    #       'video_size': video_info['size'],
    #       'consent_data': consent_data,
    #       'timestamp': datetime.now(timezone.utc).isoformat(),
    #       'status': 'pending_validation',
    #   }
    #   result = client.execute_statement(
    #       LedgerName=QLDB_LEDGER_NAME,
    #       Statement="INSERT INTO compliance_records VALUE ?",
    #       Parameters=[dumps(document)],
    #   )
    #   return result['Documents'][0]['Id']
| # --- POLYGON BLOCKCHAIN INTEGRATION (SIMULADA) --- | |
def publish_digest_polygon(period: str, authorizations: List[Dict[str, Any]]) -> Optional[str]:
    """Publish a monthly digest to the Polygon blockchain (simulated).

    Args:
        period: Period in YYYY-MM form.
        authorizations: Authorization records for the period.

    Returns:
        Transaction hash, or None on error.
    """
    try:
        # Build a canonical digest: sort records by timestamp and serialize
        # each with stable key order so the hash is order-independent.
        sorted_auths = sorted(authorizations, key=lambda x: x.get('timestamp', ''))
        digest_data = ""
        for auth in sorted_auths:
            auth_json = json.dumps({
                'user_email': auth.get('user_email', ''),
                'video_hash': auth.get('video_hash', ''),
                'timestamp': auth.get('timestamp', ''),
                'consent_accepted': auth.get('consent_accepted', False),
                'validation_status': auth.get('validation_status', ''),
                'document_id': auth.get('document_id', '')
            }, sort_keys=True, separators=(',', ':'))
            digest_data += auth_json + "|"
        digest_hash = hashlib.sha256(digest_data.encode('utf-8')).hexdigest()
        # Temporary simulation of the on-chain publish.
        block_number = 12345000 + int(time.time()) % 1000
        tx_hash = f"0x{digest_hash[:32]}"
        logger.info(f"[POLYGON - SIMULATED] Digest publicado: {period}")
        logger.info(f"[POLYGON - SIMULATED] Autorizaciones: {len(authorizations)}")
        logger.info(f"[POLYGON - SIMULATED] Hash: {digest_hash[:16]}...")
        logger.info(f"[POLYGON - SIMULATED] TX: {tx_hash}")
        logger.info(f"[POLYGON - SIMULATED] Block: {block_number}")
        return tx_hash
        # In production, sign and send the transaction with web3.py.
        # (Previously an unreachable triple-quoted string after the return;
        # preserved here as a comment.)
        #
        #   from web3 import Web3
        #   w3 = Web3(Web3.HTTPProvider(POLYGON_RPC_URL))
        #   account = w3.eth.account.from_key(POLYGON_PRIVATE_KEY)
        #   contract = w3.eth.contract(
        #       address=Web3.to_checksum_address(DIGEST_CONTRACT_ADDR),
        #       abi=DIGEST_CONTRACT_ABI,
        #   )
        #   nonce = w3.eth.get_transaction_count(account.address)
        #   tx = contract.functions.publish(
        #       Web3.to_bytes(hexstr=digest_hash), period
        #   ).build_transaction({
        #       "from": account.address, "nonce": nonce, "gas": 120000,
        #       "maxFeePerGas": w3.to_wei('60', 'gwei'),
        #       "maxPriorityFeePerGas": w3.to_wei('2', 'gwei'),
        #       "chainId": POLYGON_CHAIN_ID,
        #   })
        #   signed = w3.eth.account.sign_transaction(tx, POLYGON_PRIVATE_KEY)
        #   tx_hash = w3.eth.send_raw_transaction(signed.rawTransaction)
        #   receipt = w3.eth.wait_for_transaction_receipt(tx_hash)
        #   return receipt.transactionHash.hex()
    except Exception as e:
        logger.error(f"Error publicando digest en Polygon: {e}")
        return None
| # --- EMAIL NOTIFICATIONS (SIMULADAS) --- | |
def send_validation_email(validation_request: ValidationRequest) -> bool:
    """
    Send a validation email to the validators.
    Args:
        validation_request: Data for the validation email
    Returns:
        True on success, False on error
    """
    try:
        # Temporary: simulated email — everything goes to the log only.
        logger.info(f"[EMAIL - SIMULATED] =======================================")
        logger.info(f"[EMAIL - SIMULATED] ENVIANDO VALIDACIÓN")
        logger.info(f"[EMAIL - SIMULATED] =======================================")
        logger.info(f"[EMAIL - SIMULATED] Document ID: {validation_request.document_id}")
        logger.info(f"[EMAIL - SIMULATED] Usuario: {validation_request.user_email}")
        logger.info(f"[EMAIL - SIMULATED] Nombre: {validation_request.user_name}")
        logger.info(f"[EMAIL - SIMULATED] Vídeo: {validation_request.video_title}")
        logger.info(f"[EMAIL - SIMULATED] Hash: {validation_request.video_hash}")
        logger.info(f"[EMAIL - SIMULATED] Timestamp: {validation_request.timestamp}")
        logger.info(f"[EMAIL - SIMULATED] URL: {validation_request.video_url}")
        logger.info(f"[EMAIL - SIMULATED] Consentimientos:")
        for key, value in validation_request.consent_data.items():
            logger.info(f"[EMAIL - SIMULATED] - {key}: {value}")
        logger.info(f"[EMAIL - SIMULATED] =======================================")
        return True
        # Production template: real SMTP integration. NOTE(review): the block
        # below is an unreachable string literal after the return, kept from
        # the original code.
        """
        import smtplib
        from email.mime.text import MIMEText
        from email.mime.multipart import MIMEMultipart
        # Crear HTML del email
        html_content = f'''<html>
        <body>
        <h2>Solicitud de Validacion - Veureu</h2>
        <p>Se ha subido un nuevo video que requiere validacion:</p>
        <ul>
        <li><strong>Usuario:</strong> {validation_request.user_email}</li>
        <li><strong>Video:</strong> {validation_request.video_title}</li>
        <li><strong>Fecha:</strong> {validation_request.timestamp}</li>
        <li><strong>ID:</strong> {validation_request.document_id}</li>
        </ul>
        <h3>Acciones</h3>
        <p>
        <a href="{validation_request.video_url}/approve?doc_id={validation_request.document_id}"
        style="background-color: #28a745; color: white; padding: 10px 20px; text-decoration: none; border-radius: 4px;">
        Aprobar
        </a>

        <a href="{validation_request.video_url}/reject?doc_id={validation_request.document_id}"
        style="background-color: #dc3545; color: white; padding: 10px 20px; text-decoration: none; border-radius: 4px;">
        Rechazar
        </a>
        </p>
        <h3>Consentimientos</h3>
        <pre>{json.dumps(validation_request.consent_data, indent=2)}</pre>
        </body>
        </html>'''
        msg = MIMEMultipart('alternative')
        msg['Subject'] = f"Validación requerida - {validation_request.document_id}"
        msg['From'] = SMTP_USERNAME
        msg['To'] = "validators@veureu.cat"
        msg.attach(MIMEText(html_content, 'html'))
        # Enviar email
        with smtplib.SMTP(SMTP_SERVER, SMTP_PORT) as server:
            server.starttls()
            server.login(SMTP_USERNAME, SMTP_PASSWORD)
            server.send_message(msg)
        logger.info(f"Email de validación enviado para: {validation_request.document_id}")
        return True
        """
    except Exception as e:
        logger.error(f"Error enviando email de validación: {e}")
        return False
| # --- API ENDPOINTS --- | |
def root():
    """Health-check endpoint enriched with resilience information.

    Returns:
        JSON-serializable dict combining static service metadata with live
        health metrics and the per-dependency service status.
    """
    health = resilience_manager.get_service_health()
    statuses = get_service_status()
    payload = {
        "service": "Veureu Compliance Service",
        "status": health["status"],
        "version": "1.0.0",
        "timestamp": datetime.now().isoformat(),
        "environment": config.ENVIRONMENT,
        "uptime": health["uptime_human"],
    }
    # Pass the resilience counters through under their original key names.
    for key in ("total_processed", "error_count",
                "pending_validations", "last_digest_publish"):
        payload[key] = health[key]
    payload["services"] = statuses
    return payload
def get_recovery_status():
    """Return a detailed recovery / resilience status report."""
    report = recovery_manager.perform_recovery_check()
    health = resilience_manager.get_service_health()
    resilience_cfg = {
        "state_file": resilience_manager.state_file,
        "auto_recovery": True,
        "persistence_enabled": True,
    }
    return {
        "timestamp": datetime.now().isoformat(),
        "service_health": health,
        "recovery_report": report,
        "resilience_config": resilience_cfg,
    }
def force_recovery():
    """Force a full service recovery.

    Raises:
        HTTPException: 500 when the recovery process itself fails.
    """
    try:
        report = recovery_manager.force_full_recovery()
    except Exception as e:
        logger.error(f"Error forzando recuperación: {e}")
        raise HTTPException(status_code=500, detail=f"Error en recuperación: {str(e)}")
    return {
        "success": True,
        "timestamp": datetime.now().isoformat(),
        "recovery_report": report,
    }
def detailed_health_check():
    """Detailed health check aggregating service, connectivity and data state."""
    health = resilience_manager.get_service_health()
    connectivity = recovery_manager._check_external_connectivity()
    integrity = recovery_manager._check_data_integrity()
    # Overall status is healthy only when every component reports healthy.
    all_healthy = (
        health["status"] == "healthy"
        and connectivity["overall"]["status"] == "healthy"
        and integrity["status"] == "healthy"
    )
    return {
        "timestamp": datetime.now().isoformat(),
        "overall_status": "healthy" if all_healthy else "degraded",
        "service": health,
        "connectivity": connectivity,
        "data_integrity": integrity,
        "environment": config.ENVIRONMENT,
        "features": {
            "blockchain": config.has_blockchain_config(),
            "qldb": config.has_qldb_config(),
            "oauth": config.has_oauth_config(),
            "email": config.has_email_config(),
        },
    }
| # === AUTENTICACIÓN === | |
def validate_user(user_data: UserValidation):
    """Validate a user token; raise HTTP 401 when it is not accepted."""
    info = validate_google_token(user_data.token)
    if info is None:
        raise HTTPException(status_code=401, detail="Token inválido")
    return info
def get_login_url(request: LoginURLRequest):
    """Return the OAuth login URL for the requested callback."""
    url = generate_oauth_login_url(request.callback_url)
    if not url:
        raise HTTPException(status_code=500, detail="Error generando URL de login")
    return {"login_url": url}
def logout_user(request: LogoutRequest):
    """Invalidate a user session (stateless: the request is only logged)."""
    token_preview = request.token[:20]
    logger.info(f"Logout request for token: {token_preview}...")
    return {"success": True}
| # === CUMPLIMIENTO (QLDB) === | |
def record_consent(consent: ConsentRecord):
    """Record a user consent; raise HTTP 500 when persistence fails."""
    doc_id = record_consent_qldb(
        consent.user_info,
        consent.video_info,
        consent.consent_data,
    )
    if not doc_id:
        raise HTTPException(status_code=500, detail="Error registrando consentimiento")
    return {"document_id": doc_id}
def send_validation(request: ValidationRequest):
    """Send a validation request by email.

    Raises:
        HTTPException: 500 when the email could not be sent.
    """
    # `request` is already a ValidationRequest; the original rebuilt it via
    # `ValidationRequest(**request.dict())`, a redundant copy that also used
    # the deprecated pydantic `.dict()` API. Pass it straight through.
    if send_validation_email(request):
        return {"success": True}
    raise HTTPException(status_code=500, detail="Error enviando validación")
def send_video_upload_sms(notification: VideoUploadNotification):
    """Notify the video validator by SMS when a new video is uploaded.

    Uses the Zapier webhook (ZAPIER_WEBHOOK_CATCH) and the validator phone
    number (VIDEO_VALIDATOR_PHONE) taken from the environment.
    """
    zapier_url = os.getenv("ZAPIER_WEBHOOK_CATCH")
    validator_phone = os.getenv("VIDEO_VALIDATOR_PHONE")
    if not (zapier_url and validator_phone):
        logger.error("[VIDEO SMS] Falta ZAPIER_WEBHOOK_CATCH o VIDEO_VALIDATOR_PHONE")
        raise HTTPException(status_code=500, detail="SMS validator config missing")
    payload = {
        "phone": validator_phone,
        "message": f"Revise el vídeo {notification.video_name}",
        "sha1sum": notification.sha1sum,
        "video_name": notification.video_name,
    }
    try:
        resp = requests.post(zapier_url, json=payload, timeout=10)
        if not resp.ok:
            logger.error(
                f"[VIDEO SMS] Error HTTP enviant SMS al validor: {resp.status_code} {resp.text}"
            )
            raise HTTPException(status_code=500, detail="Error enviant SMS al validor")
        logger.info(
            f"[VIDEO SMS] SMS de validació enviat a {validator_phone} per al vídeo {notification.video_name}"
        )
        return {"success": True}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"[VIDEO SMS] Error inesperat enviant SMS al validor: {e}")
        raise HTTPException(status_code=500, detail="Error inesperat enviant SMS al validor")
def send_login_sms(req: LoginSMSRequest):
    """Send a mobile-verification SMS for the demo login flow.

    Uses the Zapier webhook (ZAPIER_WEBHOOK_CATCH) defined in this space, or
    Twilio's REST API, depending on the AUTOMATION_* environment flags.
    The code is never shown on screen; it is delivered by SMS only.
    """
    logger.info(f"[LOGIN SMS] Petició rebuda per telèfon: {req.phone}")
    # Normalize the phone number to international E.164 format
    normalized_phone = req.phone.strip().replace(" ", "")
    if not normalized_phone.startswith("+"):
        # Defaults to the Spanish country prefix when none is given
        normalized_phone = "+34" + normalized_phone
    # Message includes the code in clear text so the user can type it in demo
    message = f"El teu codi secret de validació és: {req.code}"
    # Delivery feature flags: Zapier defaults to off, Twilio defaults to on
    use_zapier = os.getenv("AUTOMATION_ZAPIER_ENABLED", "false").lower() == "true"
    use_twilio = os.getenv("AUTOMATION_TWILIO_ENABLED", "true").lower() == "true"
    try:
        # 1) When Zapier is enabled, prefer the existing webhook flow
        if use_zapier:
            zapier_url = os.getenv("ZAPIER_WEBHOOK_CATCH")
            if not zapier_url:
                logger.error("[LOGIN SMS] ZAPIER_WEBHOOK_CATCH no definit")
                raise HTTPException(status_code=500, detail="ZAPIER webhook not configured")
            payload = {
                "phone": normalized_phone,
                "message": message,
                "code": req.code,
            }
            logger.info(f"[LOGIN SMS] Enviant petició a Zapier per a {normalized_phone}...")
            resp = requests.post(zapier_url, json=payload, timeout=10)
            logger.info(f"[LOGIN SMS] Resposta Zapier status={resp.status_code}")
            if not resp.ok:
                logger.error(
                    f"[LOGIN SMS] Error HTTP enviant SMS de login via Zapier: {resp.status_code} {resp.text}"
                )
                raise HTTPException(status_code=500, detail="Error enviant SMS de login")
            logger.info(f"[LOGIN SMS] SMS de verificació enviat a {normalized_phone} via Zapier")
            return {"success": True}
        # 2) When Zapier is disabled but Twilio is enabled, send directly via Twilio
        if use_twilio:
            account_sid = os.getenv("TWILIO_ACCOUNT_SID")
            auth_token = os.getenv("TWILIO_AUTH_TOKEN")
            from_number = os.getenv("TWILIO_FROM")
            if not all([account_sid, auth_token, from_number]):
                logger.error("[LOGIN SMS] Configuració Twilio incompleta (TWILIO_ACCOUNT_SID/TWILIO_AUTH_TOKEN/TWILIO_FROM)")
                raise HTTPException(status_code=500, detail="Twilio config missing")
            twilio_url = f"https://api.twilio.com/2010-04-01/Accounts/{account_sid}/Messages.json"
            data = {
                "To": normalized_phone,
                "From": from_number,
                "Body": message,
            }
            logger.info(f"[LOGIN SMS] Enviant SMS via Twilio a {normalized_phone}...")
            resp = requests.post(twilio_url, data=data, auth=(account_sid, auth_token), timeout=10)
            logger.info(f"[LOGIN SMS] Resposta Twilio status={resp.status_code}")
            # Twilio's Messages endpoint answers 201 Created on success
            if resp.status_code not in (200, 201):
                logger.error(f"[LOGIN SMS] Error HTTP enviant SMS de login via Twilio: {resp.status_code} {resp.text}")
                raise HTTPException(status_code=500, detail="Error enviant SMS de login")
            logger.info(f"[LOGIN SMS] SMS de verificació enviat a {normalized_phone} via Twilio")
            return {"success": True}
        # 3) Neither mechanism is enabled: report a configuration error
        logger.error("[LOGIN SMS] Cap mecanisme d'enviament SMS habilitat (ni Zapier ni Twilio)")
        raise HTTPException(status_code=500, detail="Cap mecanisme SMS habilitat")
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"[LOGIN SMS] Error inesperat enviant SMS de login: {e}")
        raise HTTPException(status_code=500, detail="Error inesperat enviant SMS de login")
def record_decision(decision: ValidatorDecision):
    """Log a validator's decision for a document (no persistence yet)."""
    for label, value in (
        ("Documento", decision.document_id),
        ("Validador", decision.validator_email),
        ("Decisión", decision.decision),
        ("Comentarios", decision.comments),
    ):
        logger.info(f"[DECISION] {label}: {value}")
    return {"success": True}
def get_compliance_stats():
    """Return compliance statistics (temporary: simulated placeholder data)."""
    stats = {
        "total_documents": 1247,
        "pending_validation": 23,
        "approved": 1180,
        "rejected": 44,
        "monthly_digests": 12,
        "last_digest": "2025-11",
        "gas_used_total": "0.42 MATIC",
    }
    # Stamp the response with the current (naive local) time, as before.
    stats["last_updated"] = datetime.now().isoformat()
    return stats
| # === BLOCKCHAIN (POLYGON) === | |
def publish_digest(request: DigestPublish):
    """Publish the monthly digest to the blockchain.

    When `digest_hash` is supplied it is used directly as the monthly digest
    root; otherwise simulated authorization data is digested instead.
    """
    try:
        provided = request.digest_hash
        if provided:
            # Simplified mode: derive the tx hash straight from the given digest.
            tx_hash = f"0x{provided[:64]}"
            logger.info(
                f"[POLYGON MONTHLY DIGEST] period={request.period} hash={provided} tx={tx_hash}"
            )
            return {"transaction_hash": tx_hash}
        # Demo mode: digest a small batch of simulated authorizations.
        simulated = []
        for idx, (email, vhash, when) in enumerate(
            [
                ("user1@example.com", "abc123...", "15T10:00:00Z"),
                ("user2@example.com", "def456...", "20T14:30:00Z"),
            ],
            start=1,
        ):
            simulated.append({
                "user_email": email,
                "video_hash": vhash,
                "timestamp": f"{request.period}-{when}",
                "consent_accepted": True,
                "validation_status": "approved",
                "document_id": f"doc_{request.period}_{idx:03d}",
            })
        tx_hash = publish_digest_polygon(request.period, simulated)
        if not tx_hash:
            raise HTTPException(status_code=500, detail="Error publicando digest simulado")
        return {"transaction_hash": tx_hash}
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error publicant digest mensual: {e}")
        raise HTTPException(status_code=500, detail="Error publicant digest mensual")
def publish_events_digest(request: EventsDigestPublish):
    """Publish a session-events digest to Polygon (simulated).

    Takes a pre-computed digest hash plus a session id and returns the
    transaction hash together with its Polygonscan URL.
    """
    try:
        digest_hash = request.digest_hash
        if not digest_hash:
            raise HTTPException(status_code=400, detail="digest_hash buit")
        # Simulated publish: the tx hash is derived from the digest itself.
        # In production this would sign and send a Web3 transaction to the
        # HashLogger (or equivalent) contract.
        tx_hash = f"0x{digest_hash[:64]}"
        logger.info(
            f"[POLYGON EVENTS] session={request.session_id} hash={digest_hash} tx={tx_hash}"
        )
        base = os.getenv("POLYGONSCAN_URL", "https://polygonscan.com/tx/")
        return {
            "transaction_hash": tx_hash,
            "transaction_url": base.rstrip("/") + "/" + tx_hash,
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error publicant digest d'esdeveniments a Polygon: {e}")
        raise HTTPException(status_code=500, detail="Error publicant digest d'esdeveniments")
def publish_actions_qldb(request: ActionsQLDBPayload):
    """Accept an actions.db change set for persistence in AWS QLDB.

    Entry point for a deployed integration to write the payload into an AWS
    QLDB table; currently it only acknowledges and counts the actions.
    """
    try:
        count = len(request.actions or [])
        logger.info(
            f"[QLDB ACTIONS] session={request.session_id} actions={count}"
        )
        return {"stored": True, "actions_count": count}
    except Exception as e:
        logger.error(f"Error gestionant publicació d'actions a QLDB: {e}")
        raise HTTPException(
            status_code=500,
            detail="Error publicant canvis d'actions a QLDB",
        )
def get_digests():
    """Return the list of published digests (temporary: simulated data)."""
    rows = [
        ("2025-11",
         "0x0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
         12345678, "2025-11-03T14:30:00Z", 189),
        ("2025-10",
         "0xfedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210",
         12345670, "2025-10-31T16:45:00Z", 156),
        ("2025-09",
         "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
         12345660, "2025-09-30T12:15:00Z", 134),
    ]
    return {
        "digests": [
            {
                "period": period,
                "transaction_hash": tx,
                "block_number": block,
                "timestamp": ts,
                "authorization_count": count,
            }
            for period, tx, block, ts, count in rows
        ]
    }
def verify_digest(request: DigestVerify):
    """Verify digest integrity on the blockchain.

    Temporary behaviour: any known period validates successfully, regardless
    of the supplied hash.
    """
    known_periods = ["2025-11", "2025-10", "2025-09", "2025-08"]
    logger.info(f"[VERIFY] Período solicitado: {request.period}")
    logger.info(f"[VERIFY] Hash esperado: {request.expected_hash[:16]}...")
    is_known = request.period in known_periods
    if is_known:
        logger.info(f"[VERIFY] VALIDO - Período conocido")
    else:
        logger.info(f"[VERIFY] INVALIDO - Período desconocido")
    return {"valid": is_known}
| # --- STREAMLIT DASHBOARD (OPCIONAL) --- | |
def st_dashboard() -> None:
    """Render the internal Streamlit monitoring dashboard.

    Displays service health, resilience/recovery state, blockchain
    configuration, recent (simulated) logs and the environment settings.
    State is read from ``resilience_manager`` / ``recovery_manager`` and
    from the module-level Polygon configuration constants
    (``POLYGON_RPC_URL``, ``POLYGON_CHAIN_ID``, ``DIGEST_CONTRACT_ADDR``,
    ``POLYGON_PRIVATE_KEY``, ``DIGEST_CONTRACT_ABI``).

    Side effects only (Streamlit UI rendering); returns nothing.
    """
    st.set_page_config(
        page_title="Veureu Compliance",
        layout="wide",
        page_icon="🔐"
    )
    st.title("Veureu Compliance Service")
    st.markdown("Microservicio de cumplimiento normativo con **resiliencia integrada**")

    # Current service health snapshot (status, uptime, counters, timestamps).
    service_health = resilience_manager.get_service_health()
    # NOTE(review): the return value of get_service_status() was previously
    # bound to an unused local. The call is kept in case it has side effects
    # — TODO confirm it is a pure getter and drop the call entirely.
    get_service_status()

    # --- Headline metrics -------------------------------------------------
    col1, col2, col3, col4 = st.columns(4)
    with col1:
        status_indicator = "OK" if service_health["status"] == "healthy" else "WARN"
        st.metric("Estado", f"{status_indicator} {service_health['status'].title()}")
    with col2:
        env_indicator = "SIM" if config.is_simulation() else "PROD"
        st.metric("Modo", f"{env_indicator} {config.ENVIRONMENT.title()}")
    with col3:
        st.metric("Uptime", service_health["uptime_human"])
    with col4:
        # max(..., 1) guards against division by zero before any traffic.
        error_rate = (
            service_health["error_count"] / max(service_health["total_processed"], 1) * 100
        )
        st.metric("Error Rate", f"{error_rate:.1f}%")

    # --- Tabs with resilience information ---------------------------------
    tab1, tab2, tab3, tab4, tab5 = st.tabs([
        "Estado General",
        "Resiliencia",
        "Blockchain",
        "Logs",
        "Configuración"
    ])

    with tab1:
        st.subheader("Estado General del Servicio")
        # Detailed per-subsystem status (static labels in simulation mode).
        col1, col2, col3 = st.columns(3)
        with col1:
            st.success("Autenticacion: Activa")
            st.info("QLDB: Simulado")
            st.info("Email: Simulado")
        with col2:
            st.info("Polygon: Simulado")
            st.success("API: Funcionando")
            st.success("Dashboard: Activo")
        with col3:
            st.metric("Total Endpoints", "15")
            st.metric("Procesados", service_health["total_processed"])
            st.metric("Errores", service_health["error_count"])
        # Validation/digest/sync status pulled from the health snapshot.
        st.markdown("### Estado de Validaciones")
        col1, col2, col3 = st.columns(3)
        with col1:
            st.metric("Validaciones Pendientes", service_health["pending_validations"])
        with col2:
            # Falls back to "Nunca" when no digest has ever been published.
            last_digest = service_health["last_digest_publish"] or "Nunca"
            st.metric("Ultimo Digest", last_digest)
        with col3:
            # [:10] keeps only the date portion of the ISO timestamp.
            st.metric("Ultima Sinc QLDB", service_health["last_qldb_sync"][:10])

    with tab2:
        st.subheader("Sistema de Resiliencia")
        st.markdown("### Estado de Recuperacion")
        recovery_report = recovery_manager.perform_recovery_check()
        if recovery_report["recovery_needed"]:
            st.warning(f"Se necesitan {recovery_report['total_actions']} acciones de recuperacion")
            # Map internal priority codes to display labels; anything other
            # than "high"/"medium" is shown as low priority.
            priority_labels = {"high": "ALTA", "medium": "MEDIA"}
            for action in recovery_report["recovery_actions"]:
                priority_text = priority_labels.get(action["priority"], "BAJA")
                st.markdown(f"[{priority_text}] **{action['action']}** (Prioridad: {action['priority']})")
        else:
            st.success("No se requieren acciones de recuperacion")
        # Manual recovery controls.
        st.markdown("### Controles de Recuperacion")
        col1, col2 = st.columns(2)
        with col1:
            if st.button("Verificar Estado", type="secondary"):
                with st.spinner("Verificando estado de recuperacion..."):
                    recovery_report = recovery_manager.perform_recovery_check()
                st.json(recovery_report)
        with col2:
            if st.button("Forzar Recuperacion", type="secondary"):
                with st.spinner("Forzando recuperacion completa..."):
                    recovery_report = recovery_manager.force_full_recovery()
                st.success("Recuperacion forzada completada")
                st.json(recovery_report)
        # State-persistence details.
        st.markdown("### Persistencia de Estado")
        col1, col2 = st.columns(2)
        with col1:
            st.code(f"Archivo de estado: {resilience_manager.state_file}")
            # [:19] trims the ISO timestamp to "YYYY-MM-DDTHH:MM:SS".
            st.metric("Inicio Servicio", service_health["service_start"][:19])
        with col2:
            st.info("**Caracteristicas de Resiliencia**")
            st.markdown("• Persistencia de estado en disco")
            st.markdown("• Recuperacion automatica al inicio")
            st.markdown("• Manejo de desconexiones HF")
            st.markdown("• Datos seguros en AWS QLDB")
            st.markdown("• Blockchain inmutable")

    with tab3:
        st.subheader("Configuracion Blockchain")
        col1, col2 = st.columns(2)
        with col1:
            st.markdown("**Configuracion Red**")
            st.code(f"RPC URL: {POLYGON_RPC_URL}")
            st.code(f"Chain ID: {POLYGON_CHAIN_ID}")
            st.code(f"Contract: {DIGEST_CONTRACT_ADDR or 'No configurado'}")
        with col2:
            st.markdown("**Estado Wallet**")
            # Only presence is reported — the key value itself is never shown.
            if POLYGON_PRIVATE_KEY:
                st.success("Private Key configurada")
            else:
                st.warning("Private Key no configurada")
            st.markdown("**Contrato**")
            if DIGEST_CONTRACT_ABI:
                st.success("ABI configurado")
            else:
                st.warning("ABI no configurado")

    with tab4:
        st.subheader("Logs Recientes")
        # Simulated log lines; a real log tail is not wired up yet.
        # (A previously created st.empty() placeholder was never written to
        # and has been removed.)
        logs = [
            "[INFO] Servicio iniciado correctamente",
            "[INFO] Modo simulacion activado",
            "[INFO] Endpoints registrados: 15",
            "[INFO] Health check funcionando",
            "[POLYGON] Listo para publicacion de digest",
            "[QLDB] Listo para registro de consentimientos",
            "[EMAIL] Listo para notificaciones",
            "[INFO] Esperando solicitudes del space demo..."
        ]
        for log in logs:
            st.code(log)

    with tab5:
        st.subheader("Configuracion del Sistema")
        st.markdown("**Variables de Entorno**")
        # Secrets are masked: only configured / not-configured is displayed.
        env_vars = {
            "POLYGON_RPC_URL": POLYGON_RPC_URL,
            "POLYGON_CHAIN_ID": POLYGON_CHAIN_ID,
            "DIGEST_CONTRACT_ADDR": DIGEST_CONTRACT_ADDR or "No configurado",
            "POLYGON_PRIVATE_KEY": "Configurada" if POLYGON_PRIVATE_KEY else "No configurada",
            "DIGEST_CONTRACT_ABI": "Configurado" if DIGEST_CONTRACT_ABI else "No configurado"
        }
        for key, value in env_vars.items():
            st.code(f"{key}: {value}")
        st.markdown("---")
        st.markdown("**Para activar producción:**")
        st.markdown("1. Configurar variables de entorno reales")
        st.markdown("2. Descomentar código de integración")
        st.markdown("3. Instalar dependencias adicionales")
        st.markdown("4. Reiniciar servicio")
        # st.rerun() re-executes the script, effectively refreshing the UI.
        if st.button("Reiniciar Servicio", type="secondary"):
            st.rerun()
| if __name__ == "__main__": | |
| # Ejecutar como Streamlit app para dashboard interno | |
| st_dashboard() | |
| else: | |
| # Ejecutar como FastAPI para producción | |
| logger.info("Iniciando Veureu Compliance Service como API") | |
| # uvicorn.run(app, host="0.0.0.0", port=7860) | |