issoufzousko07 commited on
Commit
7a317d2
·
1 Parent(s): c34ac93

Add all backend files for HF Space deployment

Browse files
Files changed (6) hide show
  1. Dockerfile +38 -0
  2. database.py +513 -0
  3. encryption.py +71 -0
  4. localization.py +83 -0
  5. requirements.txt +21 -0
  6. storage.py +92 -0
Dockerfile ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Hugging Face Spaces Docker Configuration
# Builds the FastAPI/uvicorn backend image following HF Spaces conventions:
# non-root UID-1000 user and the app served on port 7860.
FROM python:3.10-slim

# Create non-root user (required by HuggingFace)
RUN useradd -m -u 1000 user
ENV HOME=/home/user
ENV PATH="/home/user/.local/bin:$PATH"

WORKDIR /app

# Install system dependencies as root.
# libgl1/libglib2.0-0/libsm6/libxext6/libxrender1 are the usual runtime
# libraries needed by opencv-python; the apt cache is removed to keep the
# image small.
RUN apt-get update && apt-get install -y --no-install-recommends \
    libgl1 \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender1 \
    && rm -rf /var/lib/apt/lists/*

# Create directories as root BEFORE switching user
# (the non-root user could not create/chown them afterwards).
RUN mkdir -p /app/storage/uploads /app/storage/processed && \
    chown -R user:user /app

# Switch to non-root user
USER user

# Copy requirements and install.
# Copying requirements.txt alone first lets Docker cache the dependency
# layer independently of application-code changes.
COPY --chown=user requirements.txt .
RUN pip install --no-cache-dir --user -r requirements.txt

# Copy the rest of the application
COPY --chown=user . /app

# Expose port 7860 (required by Hugging Face Spaces)
EXPOSE 7860

# Run the application
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
database.py ADDED
@@ -0,0 +1,513 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sqlite3
2
+ import os
3
+ import logging
4
+ from typing import Optional, List, Dict, Any
5
+ from enum import Enum
6
+
7
class JobStatus(str, Enum):
    """Lifecycle states for an analysis job stored in the ``jobs`` table.

    Inherits from ``str`` so members compare equal to their plain string
    values and serialize directly to JSON.
    """
    PENDING = "pending"
    PROCESSING = "processing"
    COMPLETED = "completed"
    FAILED = "failed"
12
+
13
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# HUGGING FACE PERSISTENCE FIX: Use /data if available.
# NOTE(review): /data is presumably the persistent volume mounted on HF
# Spaces; files written elsewhere are lost on Space restart — confirm mount.
if os.path.exists('/data'):
    DB_NAME = '/data/elephmind.db'
    logging.info("Using PERSISTENT storage at /data/elephmind.db")
else:
    # Local development fallback: keep the DB file next to this module.
    DB_NAME = os.path.join(BASE_DIR, "elephmind.db")
    logging.info(f"Using LOCAL storage at {DB_NAME}")
21
+
22
def get_db_connection() -> sqlite3.Connection:
    """Open a new SQLite connection to DB_NAME.

    The sqlite3.Row factory lets callers access columns by name
    (row['col']) and convert rows with dict(row). Caller must close.
    """
    conn = sqlite3.connect(DB_NAME)
    conn.row_factory = sqlite3.Row
    return conn
26
+
27
def init_db():
    """Create all core tables and apply in-place column migrations.

    Tables: users, feedback, audit_log, patients, jobs.
    Idempotent: creation uses CREATE TABLE IF NOT EXISTS and the ALTER
    TABLE migrations swallow the 'duplicate column' OperationalError, so
    this is safe to call on every application startup.
    """
    conn = get_db_connection()
    c = conn.cursor()

    # Create Users Table
    c.execute('''
        CREATE TABLE IF NOT EXISTS users (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT UNIQUE NOT NULL,
            hashed_password TEXT NOT NULL,
            email TEXT,
            security_question TEXT NOT NULL,
            security_answer TEXT NOT NULL,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    ''')

    # Create Feedback Table
    c.execute('''
        CREATE TABLE IF NOT EXISTS feedback (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT,
            rating INTEGER,
            comment TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    ''')

    # Create Audit Log Table (RGPD Compliance)
    c.execute('''
        CREATE TABLE IF NOT EXISTS audit_log (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT,
            action TEXT NOT NULL,
            resource TEXT,
            ip_address TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    ''')

    # --- MIGRATIONS ---
    # Ensure security columns exist (backward compatibility for databases
    # created before these columns were part of the CREATE statement).
    try:
        c.execute("ALTER TABLE users ADD COLUMN security_question TEXT DEFAULT 'Question?'")
    except sqlite3.OperationalError:
        pass  # Column exists

    try:
        c.execute("ALTER TABLE users ADD COLUMN security_answer TEXT DEFAULT 'answer'")
    except sqlite3.OperationalError:
        pass  # Column exists
    # ------------------

    # Create Patients Table
    c.execute('''
        CREATE TABLE IF NOT EXISTS patients (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            patient_id TEXT UNIQUE NOT NULL, -- e.g. PAT-2026-1234
            owner_username TEXT NOT NULL,
            first_name TEXT,
            last_name TEXT,
            birth_date TEXT,
            photo TEXT, -- Stores base64 or URL
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY(owner_username) REFERENCES users(username)
        )
    ''')

    # Create Jobs Table (PERSISTENCE)
    c.execute('''
        CREATE TABLE IF NOT EXISTS jobs (
            id TEXT PRIMARY KEY,
            status TEXT NOT NULL,
            result TEXT, -- JSON serialized
            error TEXT,
            created_at REAL,
            storage_path TEXT,
            username TEXT,
            file_type TEXT,
            FOREIGN KEY(username) REFERENCES users(username)
        )
    ''')

    conn.commit()
    conn.close()
    logging.info(f"Database {DB_NAME} initialized successfully.")
113
+
114
+ # --- User Operations ---
115
+
116
def create_user(user: Dict[str, Any]) -> bool:
    """Insert a new user row.

    Args:
        user: Mapping with keys 'username', 'hashed_password',
            'security_question', 'security_answer'; 'email' is optional.

    Returns:
        True on success; False when the username already exists
        (UNIQUE constraint) or on any other database error.
    """
    conn = None
    try:
        conn = get_db_connection()
        c = conn.cursor()
        c.execute('''
            INSERT INTO users (username, hashed_password, email, security_question, security_answer)
            VALUES (?, ?, ?, ?, ?)
        ''', (
            user['username'],
            user['hashed_password'],
            user.get('email', ''),
            user['security_question'],
            user['security_answer']
        ))
        conn.commit()
        return True
    except sqlite3.IntegrityError:
        # Duplicate username (UNIQUE constraint) — expected, not logged.
        return False
    except Exception as e:
        logging.error(f"Error creating user: {e}")
        return False
    finally:
        # BUGFIX: 'conn' was only assigned inside the try block, so the
        # original 'finally: conn.close()' raised NameError when the
        # connection itself failed to open.
        if conn is not None:
            conn.close()
139
+
140
def get_user_by_username(username: str) -> Optional[Dict[str, Any]]:
    """Return the user row for *username* as a dict, or None if absent."""
    conn = get_db_connection()
    row = conn.execute('SELECT * FROM users WHERE username = ?', (username,)).fetchone()
    conn.close()
    return dict(row) if row else None
149
+
150
def update_password(username: str, new_hashed_password: str) -> bool:
    """Replace the stored password hash for *username*.

    Returns True on success (even if no such user exists — the UPDATE
    simply matches zero rows), False on any database error.
    """
    conn = None
    try:
        conn = get_db_connection()
        c = conn.cursor()
        c.execute('UPDATE users SET hashed_password = ? WHERE username = ?', (new_hashed_password, username))
        conn.commit()
        return True
    except Exception as e:
        logging.error(f"Error updating password: {e}")
        return False
    finally:
        # BUGFIX: the connection was leaked when execute/commit raised;
        # close it on every path.
        if conn is not None:
            conn.close()
161
+
162
+ # --- Feedback Operations ---
163
+
164
def add_feedback(username: str, rating: int, comment: str):
    """Store one feedback entry (numeric rating plus free-text comment)."""
    conn = get_db_connection()
    c = conn.cursor()
    c.execute('INSERT INTO feedback (username, rating, comment) VALUES (?, ?, ?)', (username, rating, comment))
    conn.commit()
    conn.close()
170
+
171
+ # --- Audit Log Operations (RGPD Compliance) ---
172
+
173
def log_audit(username: str, action: str, resource: Optional[str] = None, ip_address: Optional[str] = None):
    """Log user actions for RGPD compliance and security auditing.

    Best-effort: any database error is logged and swallowed so that a
    failing audit write never breaks the calling request.
    """
    try:
        conn = get_db_connection()
        c = conn.cursor()
        c.execute(
            'INSERT INTO audit_log (username, action, resource, ip_address) VALUES (?, ?, ?, ?)',
            (username, action, resource, ip_address)
        )
        conn.commit()
        conn.close()
    except Exception as e:
        logging.error(f"Error logging audit: {e}")
186
+
187
def get_user_audit_log(username: str, limit: int = 100) -> List[Dict[str, Any]]:
    """Return up to *limit* audit entries for *username*, newest first."""
    conn = get_db_connection()
    rows = conn.execute(
        'SELECT * FROM audit_log WHERE username = ? ORDER BY created_at DESC LIMIT ?',
        (username, limit),
    ).fetchall()
    conn.close()
    return [dict(r) for r in rows]
198
+
199
+ # --- Analysis Registry (REAL DATA ONLY) ---
200
+
201
def init_analysis_registry():
    """Create the analysis_registry table if it doesn't exist.

    Idempotent (CREATE TABLE IF NOT EXISTS); safe to call on startup.
    """
    conn = get_db_connection()
    c = conn.cursor()
    c.execute('''
        CREATE TABLE IF NOT EXISTS analysis_registry (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT NOT NULL,
            domain TEXT NOT NULL,
            top_diagnosis TEXT,
            confidence REAL,
            priority TEXT,
            computation_time_ms INTEGER,
            file_type TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    ''')
    conn.commit()
    conn.close()
220
+
221
def log_analysis(
    username: str,
    domain: str,
    top_diagnosis: str,
    confidence: float,
    priority: str,
    computation_time_ms: int,
    file_type: str
) -> bool:
    """Log a real analysis to the registry. NO FAKE DATA.

    Returns:
        True on success, False on any database error.
    """
    conn = None
    try:
        conn = get_db_connection()
        c = conn.cursor()
        c.execute('''
            INSERT INTO analysis_registry
            (username, domain, top_diagnosis, confidence, priority, computation_time_ms, file_type)
            VALUES (?, ?, ?, ?, ?, ?, ?)
        ''', (username, domain, top_diagnosis, confidence, priority, computation_time_ms, file_type))
        conn.commit()
        return True
    except Exception as e:
        logging.error(f"Error logging analysis: {e}")
        return False
    finally:
        # BUGFIX: the connection was leaked when the INSERT raised.
        if conn is not None:
            conn.close()
245
+
246
def get_dashboard_stats(username: str) -> Dict[str, Any]:
    """Get real dashboard statistics for a user. Returns zeros if no data.

    Aggregates the analysis_registry table: total row count, per-domain
    and per-priority counts, and the average computation time in ms.
    """
    conn = get_db_connection()
    c = conn.cursor()

    # Total count
    c.execute('SELECT COUNT(*) FROM analysis_registry WHERE username = ?', (username,))
    total = c.fetchone()[0]

    # By domain
    c.execute('''
        SELECT domain, COUNT(*) as count
        FROM analysis_registry
        WHERE username = ?
        GROUP BY domain
    ''', (username,))
    by_domain = {row['domain']: row['count'] for row in c.fetchall()}

    # By priority
    c.execute('''
        SELECT priority, COUNT(*) as count
        FROM analysis_registry
        WHERE username = ?
        GROUP BY priority
    ''', (username,))
    by_priority = {row['priority']: row['count'] for row in c.fetchall()}

    # Average computation time
    c.execute('''
        SELECT AVG(computation_time_ms)
        FROM analysis_registry
        WHERE username = ?
    ''', (username,))
    # AVG over zero rows yields NULL (None) -> fall back to 0.
    avg_time = c.fetchone()[0] or 0

    conn.close()

    return {
        "total_analyses": total,
        "by_domain": by_domain,
        "by_priority": by_priority,
        "avg_computation_time_ms": round(avg_time, 0)
    }
289
+
290
def get_recent_analyses(username: str, limit: int = 10) -> List[Dict[str, Any]]:
    """Return the user's most recent analyses (empty list when none exist)."""
    query = '''
        SELECT id, domain, top_diagnosis, confidence, priority, computation_time_ms, file_type, created_at
        FROM analysis_registry
        WHERE username = ?
        ORDER BY created_at DESC
        LIMIT ?
    '''
    conn = get_db_connection()
    records = conn.execute(query, (username, limit)).fetchall()
    conn.close()
    return [dict(record) for record in records]
304
+
305
+ # --- Patient Operations (New for Migration) ---
306
+
307
def create_patient(
    owner_username: str,
    patient_id: str,
    first_name: str,
    last_name: str,
    birth_date: str,
    photo: str
) -> Optional[int]:
    """Create a new patient record.

    Args:
        owner_username: User who owns this patient record.
        patient_id: Public patient identifier (e.g. PAT-2026-1234), UNIQUE.
        photo: Base64 string or URL (per the patients table schema).

    Returns:
        The database row id of the new patient, or None on any error
        (including a duplicate patient_id).
    """
    conn = None
    try:
        conn = get_db_connection()
        c = conn.cursor()
        c.execute('''
            INSERT INTO patients (owner_username, patient_id, first_name, last_name, birth_date, photo)
            VALUES (?, ?, ?, ?, ?, ?)
        ''', (owner_username, patient_id, first_name, last_name, birth_date, photo))
        patient_id_db = c.lastrowid
        conn.commit()
        return patient_id_db
    except Exception as e:
        logging.error(f"Error creating patient: {e}")
        return None
    finally:
        # BUGFIX: the connection was leaked when the INSERT raised.
        if conn is not None:
            conn.close()
330
+
331
def get_patients_by_user(username: str) -> List[Dict[str, Any]]:
    """Return every patient record owned by *username*, newest first."""
    conn = get_db_connection()
    records = conn.execute(
        'SELECT * FROM patients WHERE owner_username = ? ORDER BY created_at DESC',
        (username,),
    ).fetchall()
    conn.close()
    return [dict(record) for record in records]
339
+
340
def delete_patient(username: str, patient_db_id: int) -> bool:
    """Delete a patient record if owned by user.

    The ownership check is part of the DELETE's WHERE clause, so a user
    can never delete another user's patient.

    Returns:
        True when a row was deleted, False otherwise (not found, not
        owned, or database error).
    """
    conn = None
    try:
        conn = get_db_connection()
        c = conn.cursor()
        c.execute('DELETE FROM patients WHERE id = ? AND owner_username = ?', (patient_db_id, username))
        count = c.rowcount
        conn.commit()
        return count > 0
    except Exception as e:
        logging.error(f"Error deleting patient: {e}")
        return False
    finally:
        # BUGFIX: the connection was leaked when execute/commit raised.
        if conn is not None:
            conn.close()
353
+
354
def update_patient(username: str, patient_db_id: int, updates: Dict[str, Any]) -> bool:
    """Update patient fields, scoped to the owning user.

    Args:
        updates: Field name -> new value. Only first_name, last_name,
            birth_date and photo are applied; any other key is ignored.

    Returns:
        True when a row was updated, False otherwise (no valid fields,
        not found/owned, or database error).
    """
    conn = None
    try:
        conn = get_db_connection()
        c = conn.cursor()

        # Build the SET clause dynamically, but only from the whitelist:
        # column names are interpolated, all values stay parameterized.
        fields = []
        values = []
        for k, v in updates.items():
            if k in ['first_name', 'last_name', 'birth_date', 'photo']:
                fields.append(f"{k} = ?")
                values.append(v)

        if not fields:
            return False

        values.extend([patient_db_id, username])
        query = f"UPDATE patients SET {', '.join(fields)} WHERE id = ? AND owner_username = ?"

        c.execute(query, values)
        count = c.rowcount
        conn.commit()
        return count > 0
    except Exception as e:
        logging.error(f"Error updating patient: {e}")
        return False
    finally:
        # BUGFIX: the connection was leaked on error AND on the early
        # 'no valid fields' return path.
        if conn is not None:
            conn.close()
383
+
384
+ # --- Job Operations (Persistence) ---
385
+
386
+ import json
387
+
388
def create_job(job_data: Dict[str, Any]):
    """Persist a new job row.

    Args:
        job_data: Must contain 'id' and 'created_at'; 'status' defaults
            to 'pending'; 'result' (if present) is JSON-serialized.

    Returns:
        True on success, False on any database error.
    """
    conn = None
    try:
        conn = get_db_connection()
        c = conn.cursor()
        c.execute('''
            INSERT INTO jobs (id, status, result, error, created_at, storage_path, username, file_type)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
        ''', (
            job_data['id'],
            job_data.get('status', 'pending'),
            json.dumps(job_data.get('result')) if job_data.get('result') else None,
            job_data.get('error'),
            job_data['created_at'],
            job_data.get('storage_path'),
            job_data.get('username'),
            job_data.get('file_type')
        ))
        conn.commit()
        return True
    except Exception as e:
        logging.error(f"Error creating job: {e}")
        return False
    finally:
        # BUGFIX: the connection was leaked when the INSERT raised.
        if conn is not None:
            conn.close()
412
+
413
def get_job(job_id: str, username: Optional[str] = None) -> Optional[Dict[str, Any]]:
    """Retrieve job by ID, optionally enforcing ownership via SQL.

    The JSON 'result' column is deserialized; a corrupt value yields
    result=None rather than an exception.
    """
    conn = get_db_connection()
    c = conn.cursor()

    if username:
        c.execute('SELECT * FROM jobs WHERE id = ? AND username = ?', (job_id, username))
    else:
        c.execute('SELECT * FROM jobs WHERE id = ?', (job_id,))

    row = c.fetchone()
    conn.close()

    if row:
        job = dict(row)
        if job['result']:
            try:
                job['result'] = json.loads(job['result'])
            except (json.JSONDecodeError, TypeError):
                # BUGFIX: narrowed from a bare 'except:' which also
                # swallowed KeyboardInterrupt/SystemExit.
                job['result'] = None
        return job
    return None
435
+
436
def update_job_status(job_id: str, status: str, result: Optional[Dict] = None, error: Optional[str] = None):
    """Update a job's status and, optionally, its result/error columns.

    Returns:
        True on success, False on any database error.
    """
    conn = None
    try:
        conn = get_db_connection()
        c = conn.cursor()

        # SET clause is assembled from fixed column names only; all
        # values are bound as parameters.
        updates = ["status = ?"]
        params = [status]

        if result is not None:
            updates.append("result = ?")
            params.append(json.dumps(result))

        if error is not None:
            updates.append("error = ?")
            params.append(error)

        params.append(job_id)

        query = f"UPDATE jobs SET {', '.join(updates)} WHERE id = ?"
        c.execute(query, params)
        conn.commit()
        return True
    except Exception as e:
        logging.error(f"Error updating job: {e}")
        return False
    finally:
        # BUGFIX: the connection was leaked when execute/commit raised.
        if conn is not None:
            conn.close()
463
+
464
+
465
+
466
def get_latest_job(username: str) -> Optional[Dict[str, Any]]:
    """Retrieve the most recent job for a user (by created_at), or None.

    The JSON 'result' column is deserialized; a corrupt value yields
    result=None rather than an exception.
    """
    conn = get_db_connection()
    c = conn.cursor()
    c.execute('''
        SELECT * FROM jobs
        WHERE username = ?
        ORDER BY created_at DESC
        LIMIT 1
    ''', (username,))
    row = c.fetchone()
    conn.close()

    if row:
        job = dict(row)
        if job['result']:
            try:
                job['result'] = json.loads(job['result'])
            except (json.JSONDecodeError, TypeError):
                # BUGFIX: narrowed from a bare 'except:'.
                job['result'] = None
        return job
    return None
488
+
489
def get_active_job_by_image(username: str, image_id: str) -> Optional[Dict[str, Any]]:
    """
    Retrieve the most recent job for a specific image and user.
    Used for Idempotence (Strict Lifecycle).

    *image_id* is matched against the jobs.storage_path column. The JSON
    'result' column is deserialized; a corrupt value yields result=None.
    """
    conn = get_db_connection()
    c = conn.cursor()
    c.execute('''
        SELECT * FROM jobs
        WHERE username = ? AND storage_path = ?
        ORDER BY created_at DESC
        LIMIT 1
    ''', (username, image_id))
    row = c.fetchone()
    conn.close()

    if row:
        job = dict(row)
        if job['result']:
            try:
                job['result'] = json.loads(job['result'])
            except (json.JSONDecodeError, TypeError):
                # BUGFIX: narrowed from a bare 'except:'.
                job['result'] = None
        return job
    return None
encryption.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from cryptography.fernet import Fernet
import os
import sys
import logging
from typing import Optional

# -------------------------------------------------------------------------
# ENCRYPTION CONFIGURATION - PRODUCTION READY
# -------------------------------------------------------------------------

# Environment detection
ENVIRONMENT = os.getenv("ENVIRONMENT", "development")
IS_PRODUCTION = ENVIRONMENT == "production"

# Encryption Key - Load from environment variable
ENCRYPTION_KEY = os.getenv("ENCRYPTION_KEY")

if not ENCRYPTION_KEY:
    if IS_PRODUCTION:
        # Fail fast: running production with an improvised key would make
        # previously encrypted data unrecoverable after a restart.
        logging.critical("🔴 FATAL ERROR: ENCRYPTION_KEY must be set in production environment")
        logging.critical("Generate one with: python -c 'from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())'")
        sys.exit(1)  # Fail-fast in production
    else:
        # Development fallback with ephemeral key
        # NOTE: anything encrypted with this key is unreadable after restart.
        ENCRYPTION_KEY = Fernet.generate_key().decode()
        logging.warning("⚠️ WARNING: Using ephemeral encryption key (development only)")

# Initialize cipher
# Fernet wants the key as bytes; tolerate str (from env) or bytes.
cipher_suite = Fernet(ENCRYPTION_KEY.encode() if isinstance(ENCRYPTION_KEY, str) else ENCRYPTION_KEY)
30
+
31
def encrypt_data(data: str) -> str:
    """
    Encrypt *data* with the module-level Fernet cipher and return the
    token as a string. Falsy input yields the empty string.
    """
    if not data:
        return ""
    token = cipher_suite.encrypt(data.encode('utf-8'))
    return token.decode('utf-8')
38
+
39
def decrypt_data(token: str) -> Optional[str]:
    """
    Decrypt a Fernet token back to the original string.

    Returns None for falsy input or when decryption fails (wrong key,
    tampered or corrupted token).
    """
    if not token:
        return None
    try:
        decrypted_bytes = cipher_suite.decrypt(token.encode('utf-8'))
        return decrypted_bytes.decode('utf-8')
    except Exception as e:
        # FIX: route through logging (consistent with the rest of this
        # module) instead of print(), so failures reach the app logs.
        logging.error(f"Decryption failed: {e}")
        return None
50
+
51
def rotate_key() -> str:
    """
    Generate a new Fernet key and switch the module cipher to it.

    BUGFIX: the original referenced an undefined ENCRYPTION_KEY_PATH and
    an undefined global 'key', so calling it raised NameError. The key is
    now rotated in memory only; persisting it (e.g. updating the
    ENCRYPTION_KEY secret) is the caller's responsibility.

    WARNING: data encrypted under the previous key can no longer be
    decrypted after rotation.

    Returns:
        The new key as a string.
    """
    global ENCRYPTION_KEY, cipher_suite
    new_key = Fernet.generate_key().decode()
    ENCRYPTION_KEY = new_key
    cipher_suite = Fernet(new_key.encode())
    logging.warning("Encryption key rotated in memory; update the ENCRYPTION_KEY secret to persist it.")
    return new_key
61
+
62
if __name__ == "__main__":
    # Smoke test: a value must survive an encrypt/decrypt round-trip.
    original = "Jean Dupont - Patient Zero"
    encrypted = encrypt_data(original)
    decrypted = decrypt_data(encrypted)

    print(f"Original: {original}")
    print(f"Encrypted: {encrypted}")
    print(f"Decrypted: {decrypted}")
    assert original == decrypted
localization.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Localization mappings (English -> French).
# This file translates the AI results without touching the original
# prompts, which must stay in English for model performance.

# Per-domain display strings: maps the English domain label emitted by
# the model to its French UI label and description.
DOMAIN_TRANSLATIONS = {
    'Thoracic': {
        'label': 'Thoracique',
        'description': 'Analyse Radiographique du Thorax'
    },
    'Dermatology': {
        'label': 'Dermatologie',
        'description': 'Analyse Dermatoscope des Lésions Cutanées'
    },
    'Histology': {
        'label': 'Histologie',
        'description': 'Analyse Microscopique (H&E)'
    },
    'Ophthalmology': {
        'label': 'Ophtalmologie',
        'description': 'Fond d\'Oeil (Rétine)'
    },
    'Orthopedics': {
        'label': 'Orthopédie',
        'description': 'Radiographie Osseuse'
    }
}
27
+
28
# Per-finding display strings: maps the exact English diagnosis label
# emitted by the model to its French equivalent. Labels must match the
# model output verbatim — a missing key simply falls back to English.
LABEL_TRANSLATIONS = {
    # --- THORACIC ---
    'Diffuse interstitial opacities or ground-glass pattern (Viral/Atypical Pneumonia)':
        'Opacités interstitielles diffuses ou aspect en verre dépoli (Pneumonie Virale/Atypique)',

    'Focal alveolar consolidation with air bronchograms (Bacterial Pneumonia)':
        'Condensation alvéolaire focale avec bronchogrammes aériens (Pneumonie Bactérienne)',

    'Perfectly clear lungs, sharp costophrenic angles, no pathology':
        'Poumons parfaitement clairs, angles costophréniques nets, aucune pathologie',

    'Pneumothorax (Lung collapse)': 'Pneumothorax (Décollement de la plèvre)',
    'Pleural Effusion (Fluid)': 'Épanchement Pleural (Liquide)',
    'Cardiomegaly (Enlarged heart)': 'Cardiomégalie (Cœur élargi)',
    'Pulmonary Edema': 'Œdème Pulmonaire',
    'Lung Nodule or Mass': 'Nodule ou Masse Pulmonaire',
    'Atelectasis (Lung collapse)': 'Atélectasie (Affaissement pulmonaire)',

    # --- DERMATOLOGY ---
    'A healthy skin area without lesion': 'Zone de peau saine sans lésion',
    'A benign nevus (mole) regular, symmetrical and homogeneous': 'Nævus bénin (grain de beauté) régulier, symétrique et homogène',
    'A seborrheic keratosis (benign warty lesion)': 'Kératose séborrhéique (lésion verruqueuse bénigne)',
    'A malignant melanoma with asymmetry, irregular borders and multiple colors': 'Mélanome malin (Asymétrie, Bords irréguliers, Couleurs multiples)',
    'A basal cell carcinoma (pearly or ulcerated lesion)': 'Carcinome basocellulaire (lésion perlée ou ulcérée)',
    'A squamous cell carcinoma (crusty or budding lesion)': 'Carcinome épidermoïde (lésion croûteuse ou bourgeonnante)',
    'A non-specific inflammatory skin lesion': 'Lésion cutanée inflammatoire non spécifique',

    # --- ORTHOPEDICS ---
    'Severe osteoarthritis with bone-on-bone contact and large osteophytes (Grade 4)': 'Arthrose sévère avec contact os-contre-os et ostéophytes importants (Grade 4)',
    'Moderate osteoarthritis with definite joint space narrowing (Grade 2-3)': 'Arthrose modérée avec pincement articulaire net (Grade 2-3)',
    'Normal knee joint with preserved joint space and no osteophytes (Grade 0-1)': 'Genou normal, interligne articulaire préservé (Grade 0-1)',
    'Total knee arthroplasty (TKA) with metallic implant': 'Prothèse totale de genou (implant métallique)',
    'Acute knee fracture or dislocation': 'Fracture ou luxation aiguë du genou',
    'Other x-ray view (Chest, Hand, Foot, Pediatric) - OUT OF DISTRIBUTION': 'Autre vue radiographique (Hors périmètre)',
    'A knee x-ray view (Knee Joint)': 'Radiographie du Genou'
}
64
+
65
def localize_result(result_json):
    """
    Translate a raw (English) analysis result to French in place, using
    the DOMAIN_TRANSLATIONS / LABEL_TRANSLATIONS mapping dictionaries.

    Any label with no mapping is left in English (fallback). Missing or
    malformed 'domain'/'specific' entries are tolerated instead of
    raising KeyError (robustness fix). The input dict is mutated and
    also returned for convenience.
    """
    # 1. Localize the domain block.
    domain = result_json.get('domain')
    if isinstance(domain, dict):
        translation = DOMAIN_TRANSLATIONS.get(domain.get('label'))
        if translation:
            domain['label'] = translation['label']
            domain['description'] = translation['description']

    # 2. Localize the specific findings.
    for item in result_json.get('specific', []):
        translated = LABEL_TRANSLATIONS.get(item.get('label'))
        if translated:
            item['label'] = translated
        # No translation found -> keep the English label (fallback).

    return result_json
requirements.txt ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ fastapi
2
+ uvicorn
3
+ python-multipart
4
+ requests
5
+ transformers
6
+ torch
7
+ Pillow
8
+ sentencepiece
9
+ pydicom
10
+ numpy
11
+ grad-cam
12
+ python-jose[cryptography]
13
+ passlib
14
+ argon2-cffi
15
+ bcrypt==4.0.1
16
+ cryptography
17
+ python-dotenv
18
+ opencv-python
19
+ python-swiftclient
20
+ protobuf
21
+ huggingface_hub
storage.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import abc
3
+ from datetime import datetime
4
+
5
class StorageProvider(abc.ABC):
    """Abstract interface for binary file storage backends."""

    @abc.abstractmethod
    def save_file(self, file_bytes: bytes, filename: str) -> str:
        """Persist *file_bytes* under a name derived from *filename*; return its path/URI."""
        pass

    @abc.abstractmethod
    def get_file(self, filename: str) -> bytes:
        """Return the stored bytes for *filename*, or None when not found."""
        pass

class LocalStorage(StorageProvider):
    """Stores files on the local filesystem under *base_dir*."""

    def __init__(self, base_dir="data_storage"):
        self.base_dir = base_dir
        os.makedirs(base_dir, exist_ok=True)

    def save_file(self, file_bytes: bytes, filename: str) -> str:
        """Write the bytes to '<base_dir>/<timestamp>_<basename>'; return the path."""
        # Prepend timestamp to avoid collision
        ts = datetime.now().strftime("%Y%m%d_%H%M%S")
        # BUGFIX: the original ignored *filename* and wrote every upload to
        # the same literal name. Using only the basename also guards
        # against path traversal in user-supplied names.
        safe_name = f"{ts}_{os.path.basename(filename)}"
        path = os.path.join(self.base_dir, safe_name)
        with open(path, "wb") as f:
            f.write(file_bytes)
        return path

    def get_file(self, filename: str) -> bytes:
        """Read back a previously saved file by its stored name; None if missing."""
        path = os.path.join(self.base_dir, filename)
        if not os.path.exists(path):
            return None
        with open(path, "rb") as f:
            return f.read()
+
35
class SwiftStorage(StorageProvider):
    """
    OpenStack Swift Storage Provider.
    Requires python-swiftclient installed.
    """
    def __init__(self, auth_url, username, password, project_name, container_name="elephmind_images"):
        # Import here to avoid error on Windows if not installed
        try:
            from swiftclient import Connection
        except ImportError:
            raise ImportError("python-swiftclient not installed!")

        self.container_name = container_name
        self.conn = Connection(
            authurl=auth_url,
            user=username,
            key=password,
            tenant_name=project_name,
            auth_version='3',
            os_options={'user_domain_name': 'Default', 'project_domain_name': 'Default'}
        )
        # Ensure container exists (best-effort: a failure here is logged
        # and later put/get calls surface the real connectivity problem).
        try:
            self.conn.put_container(self.container_name)
        except Exception as e:
            print(f"Swift Connection Error: {e}")

    def save_file(self, file_bytes: bytes, filename: str) -> str:
        """Upload the bytes to the container; return a swift:// URI."""
        ts = datetime.now().strftime("%Y%m%d_%H%M%S")
        # BUGFIX: the original ignored *filename* and stored every object
        # under the same literal name; keep only the basename.
        safe_name = f"{ts}_{os.path.basename(filename)}"
        self.conn.put_object(
            self.container_name,
            safe_name,
            contents=file_bytes,
            content_type='application/octet-stream'
        )
        return f"swift://{self.container_name}/{safe_name}"

    def get_file(self, filename: str) -> bytes:
        """Download an object by its stored name; None when it cannot be fetched."""
        try:
            _, obj = self.conn.get_object(self.container_name, filename)
            return obj
        except Exception:
            return None
+ return None
81
+
82
+ # Factory
83
+ def get_storage_provider(config_mode="LOCAL"):
84
+ if config_mode == "OPENSTACK":
85
+ return SwiftStorage(
86
+ auth_url=os.getenv("OS_AUTH_URL"),
87
+ username=os.getenv("OS_USERNAME"),
88
+ password=os.getenv("OS_PASSWORD"),
89
+ project_name=os.getenv("OS_PROJECT_NAME")
90
+ )
91
+ else:
92
+ return LocalStorage()