Initial deploy to Hugging Face
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- Dockerfile +52 -0
- backend/Dockerfile +6 -5
- backend/apps/ai_tools/views.py +1 -1
- backend/apps/bookings/apps.py +3 -0
- backend/apps/bookings/signals.py +58 -0
- backend/apps/bookings/tasks.py +206 -0
- backend/apps/bookings/views.py +0 -1
- backend/apps/mentors/admin.py +56 -5
- backend/apps/mentors/apps.py +3 -0
- backend/apps/mentors/migrations/0005_mentorapplication_ai_recommendation_and_more.py +38 -0
- backend/apps/mentors/models.py +15 -0
- backend/apps/mentors/serializers.py +6 -2
- backend/apps/mentors/services.py +122 -0
- backend/apps/mentors/signals.py +17 -0
- backend/apps/messaging/consumers.py +97 -22
- backend/apps/messaging/serializers.py +37 -2
- backend/apps/messaging/tasks.py +192 -0
- backend/apps/messaging/views.py +13 -25
- backend/apps/opportunities/views.py +2 -2
- backend/apps/users/apps.py +3 -0
- backend/apps/users/encryption.py +78 -0
- backend/apps/users/management/__init__.py +1 -0
- backend/apps/users/management/commands/__init__.py +1 -0
- backend/apps/users/management/commands/fix_profile_flags.py +47 -0
- backend/apps/users/management/commands/generate_encryption_keys.py +56 -0
- backend/apps/users/serializers.py +12 -5
- backend/apps/users/signals.py +49 -0
- backend/celery.py +0 -12
- backend/educonnect/__init__.py +8 -0
- backend/educonnect/asgi.py +8 -12
- backend/educonnect/celery.py +22 -0
- backend/educonnect/settings.py +48 -23
- backend/list_models.py +21 -0
- backend/requirements.txt +7 -4
- docker-compose.yml +101 -0
- ANALYTICS_SYSTEM.md → documentation/ANALYTICS_SYSTEM.md +0 -0
- AVATAR_ERROR_FIX.md → documentation/AVATAR_ERROR_FIX.md +0 -0
- AVATAR_UPLOAD_COMPLETE_FIX.md → documentation/AVATAR_UPLOAD_COMPLETE_FIX.md +0 -0
- AVATAR_UPLOAD_FIX.md → documentation/AVATAR_UPLOAD_FIX.md +0 -0
- BADGES_INTEGRATION.md → documentation/BADGES_INTEGRATION.md +0 -0
- CHAT_BUG_FIX.md → documentation/CHAT_BUG_FIX.md +0 -0
- DATABASE_LOCKED_SOLUTION.md → documentation/DATABASE_LOCKED_SOLUTION.md +0 -0
- DEBOUNCE_EXPLAINED.md → documentation/DEBOUNCE_EXPLAINED.md +0 -0
- DEBUGGING_AVAILABILITIES.md → documentation/DEBUGGING_AVAILABILITIES.md +0 -0
- DEBUG_ERREURS.md → documentation/DEBUG_ERREURS.md +0 -0
- documentation/ENCRYPTION.md +143 -0
- ENCRYPTION_FEATURE.md → documentation/ENCRYPTION_FEATURE.md +0 -0
- documentation/EXTERNAL_TOOLS.md +47 -0
- FORMULES_GUIDE.md → documentation/FORMULES_GUIDE.md +0 -0
- GEMINI_SETUP.md → documentation/GEMINI_SETUP.md +0 -0
Dockerfile
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# --- Étape 1 : Build du Frontend ---
|
| 2 |
+
FROM node:20-slim AS frontend-builder
|
| 3 |
+
WORKDIR /app/frontend
|
| 4 |
+
COPY frontend/package*.json ./
|
| 5 |
+
RUN npm install
|
| 6 |
+
COPY frontend/ ./
|
| 7 |
+
# On s'assure que l'API pointe vers le même domaine en prod
|
| 8 |
+
ENV VITE_API_URL=/api
|
| 9 |
+
RUN npm run build
|
| 10 |
+
|
| 11 |
+
# --- Étape 2 : Image finale ---
|
| 12 |
+
FROM python:3.12-slim
|
| 13 |
+
|
| 14 |
+
# Installation des dépendances système
|
| 15 |
+
RUN apt-get update && apt-get install -y \
|
| 16 |
+
nginx \
|
| 17 |
+
redis-server \
|
| 18 |
+
gcc \
|
| 19 |
+
libpq-dev \
|
| 20 |
+
curl \
|
| 21 |
+
supervisor \
|
| 22 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 23 |
+
|
| 24 |
+
WORKDIR /app
|
| 25 |
+
|
| 26 |
+
# Installation des dépendances Python
|
| 27 |
+
COPY backend/requirements.txt ./backend/
|
| 28 |
+
RUN pip install --upgrade pip && pip install -r backend/requirements.txt
|
| 29 |
+
RUN pip install gunicorn daphne
|
| 30 |
+
|
| 31 |
+
# Copie du code Backend
|
| 32 |
+
COPY backend/ ./backend/
|
| 33 |
+
|
| 34 |
+
# Copie du build Frontend depuis l'étape 1
|
| 35 |
+
COPY --from=frontend-builder /app/frontend/dist ./frontend_dist
|
| 36 |
+
|
| 37 |
+
# Copie des configurations
|
| 38 |
+
COPY nginx.conf /etc/nginx/sites-available/default
|
| 39 |
+
COPY supervisord.conf ./supervisord.conf
|
| 40 |
+
COPY start.sh ./start.sh
|
| 41 |
+
RUN chmod +x ./start.sh
|
| 42 |
+
|
| 43 |
+
# Configuration Django pour Hugging Face
|
| 44 |
+
ENV PYTHONUNBUFFERED=1
|
| 45 |
+
ENV DJANGO_SETTINGS_MODULE=educonnect.settings
|
| 46 |
+
# On force SQLite pour Hugging Face si aucune DB n'est fournie
|
| 47 |
+
ENV DB_HOST=""
|
| 48 |
+
|
| 49 |
+
# Hugging Face écoute sur le port 7860
|
| 50 |
+
EXPOSE 7860
|
| 51 |
+
|
| 52 |
+
CMD ["./start.sh"]
|
backend/Dockerfile
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
-
FROM python:3.
|
| 2 |
|
| 3 |
ENV PYTHONUNBUFFERED=1
|
| 4 |
ENV PYTHONDONTWRITEBYTECODE=1
|
|
@@ -6,9 +6,10 @@ ENV PYTHONDONTWRITEBYTECODE=1
|
|
| 6 |
WORKDIR /app
|
| 7 |
|
| 8 |
# Installer les dépendances système
|
| 9 |
-
RUN apt-get update && apt-get install -y
|
| 10 |
-
postgresql-client
|
| 11 |
-
gcc
|
|
|
|
| 12 |
&& rm -rf /var/lib/apt/lists/*
|
| 13 |
|
| 14 |
# Copier requirements
|
|
@@ -27,4 +28,4 @@ RUN python manage.py collectstatic --noinput
|
|
| 27 |
|
| 28 |
EXPOSE 8000
|
| 29 |
|
| 30 |
-
CMD ["gunicorn", "--bind", "0.0.0.0:8000", "--workers", "4", "
|
|
|
|
| 1 |
+
FROM python:3.12-slim
|
| 2 |
|
| 3 |
ENV PYTHONUNBUFFERED=1
|
| 4 |
ENV PYTHONDONTWRITEBYTECODE=1
|
|
|
|
| 6 |
WORKDIR /app
|
| 7 |
|
| 8 |
# Installer les dépendances système
|
| 9 |
+
RUN apt-get update && apt-get install -y \
|
| 10 |
+
postgresql-client \
|
| 11 |
+
gcc \
|
| 12 |
+
libpq-dev \
|
| 13 |
&& rm -rf /var/lib/apt/lists/*
|
| 14 |
|
| 15 |
# Copier requirements
|
|
|
|
| 28 |
|
| 29 |
EXPOSE 8000
|
| 30 |
|
| 31 |
+
CMD ["gunicorn", "--bind", "0.0.0.0:8000", "--workers", "4", "educonnect.wsgi:application"]
|
backend/apps/ai_tools/views.py
CHANGED
|
@@ -162,7 +162,7 @@ IMPORTANT - Formatage des formules :
|
|
| 162 |
"""
|
| 163 |
|
| 164 |
response = client.models.generate_content(
|
| 165 |
-
model="gemini-
|
| 166 |
)
|
| 167 |
|
| 168 |
# Estimation des tokens (approximatif)
|
|
|
|
| 162 |
"""
|
| 163 |
|
| 164 |
response = client.models.generate_content(
|
| 165 |
+
model="gemini-flash-latest", contents=prompt
|
| 166 |
)
|
| 167 |
|
| 168 |
# Estimation des tokens (approximatif)
|
backend/apps/bookings/apps.py
CHANGED
|
@@ -4,3 +4,6 @@ from django.apps import AppConfig
|
|
| 4 |
class BookingsConfig(AppConfig):
|
| 5 |
default_auto_field = 'django.db.models.BigAutoField'
|
| 6 |
name = 'apps.bookings'
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
class BookingsConfig(AppConfig):
|
| 5 |
default_auto_field = 'django.db.models.BigAutoField'
|
| 6 |
name = 'apps.bookings'
|
| 7 |
+
|
| 8 |
+
def ready(self):
|
| 9 |
+
import apps.bookings.signals # noqa
|
backend/apps/bookings/signals.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================
|
| 2 |
+
# apps/bookings/signals.py - Signaux pour les réservations
|
| 3 |
+
# ============================================
|
| 4 |
+
from django.db.models.signals import post_save
|
| 5 |
+
from django.dispatch import receiver
|
| 6 |
+
import datetime
|
| 7 |
+
import logging
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
@receiver(post_save, sender='bookings.Booking')
|
| 13 |
+
def schedule_tasks_on_confirm(sender, instance, created, **kwargs):
|
| 14 |
+
"""
|
| 15 |
+
Quand un booking passe en CONFIRMED:
|
| 16 |
+
1. Planifie le déblocage des messages
|
| 17 |
+
2. Planifie les rappels de rendez-vous
|
| 18 |
+
"""
|
| 19 |
+
from django.utils import timezone
|
| 20 |
+
|
| 21 |
+
# Ne traiter que les bookings confirmés
|
| 22 |
+
if instance.status != 'CONFIRMED':
|
| 23 |
+
return
|
| 24 |
+
|
| 25 |
+
# Calculer l'heure de début
|
| 26 |
+
booking_start = datetime.datetime.combine(instance.date, instance.time)
|
| 27 |
+
booking_start = timezone.make_aware(booking_start)
|
| 28 |
+
now = timezone.now()
|
| 29 |
+
|
| 30 |
+
# === 1. Déblocage des messages ===
|
| 31 |
+
if booking_start <= now:
|
| 32 |
+
try:
|
| 33 |
+
from apps.messaging.tasks import unlock_messages_for_booking
|
| 34 |
+
# Exécuter immédiatement
|
| 35 |
+
unlock_messages_for_booking.delay(instance.id)
|
| 36 |
+
logger.info(f"Déblocage immédiat des messages pour booking {instance.id}")
|
| 37 |
+
except Exception as e:
|
| 38 |
+
logger.error(f"Erreur lors du déblocage immédiat: {e}")
|
| 39 |
+
else:
|
| 40 |
+
try:
|
| 41 |
+
from apps.messaging.tasks import unlock_messages_for_booking
|
| 42 |
+
# Planifier l'exécution à l'heure du booking
|
| 43 |
+
unlock_messages_for_booking.apply_async(
|
| 44 |
+
args=[instance.id],
|
| 45 |
+
eta=booking_start
|
| 46 |
+
)
|
| 47 |
+
logger.info(f"Déblocage planifié pour booking {instance.id} à {booking_start}")
|
| 48 |
+
except Exception as e:
|
| 49 |
+
logger.error(f"Erreur lors de la planification du déblocage: {e}")
|
| 50 |
+
|
| 51 |
+
# === 2. Planifier les rappels de rendez-vous ===
|
| 52 |
+
try:
|
| 53 |
+
from apps.bookings.tasks import schedule_all_reminders
|
| 54 |
+
# Planifier tous les rappels de manière asynchrone
|
| 55 |
+
schedule_all_reminders.delay(instance.id)
|
| 56 |
+
logger.info(f"Planification des rappels pour booking {instance.id}")
|
| 57 |
+
except Exception as e:
|
| 58 |
+
logger.error(f"Erreur lors de la planification des rappels: {e}")
|
backend/apps/bookings/tasks.py
ADDED
|
@@ -0,0 +1,206 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================
|
| 2 |
+
# apps/bookings/tasks.py - Tâches Celery pour les réservations
|
| 3 |
+
# ============================================
|
| 4 |
+
import datetime
|
| 5 |
+
from celery import shared_task
|
| 6 |
+
from django.utils import timezone
|
| 7 |
+
import logging
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
# Intervalles de rappel avant le rendez-vous (en minutes)
|
| 12 |
+
REMINDER_INTERVALS = [
|
| 13 |
+
(1440, "24 heures"), # 24h avant
|
| 14 |
+
(60, "1 heure"), # 1h avant
|
| 15 |
+
(30, "30 minutes"), # 30min avant
|
| 16 |
+
(15, "15 minutes"), # 15min avant
|
| 17 |
+
(5, "5 minutes"), # 5min avant
|
| 18 |
+
]
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
@shared_task(name='bookings.send_booking_reminder')
|
| 22 |
+
def send_booking_reminder(booking_id, time_label):
|
| 23 |
+
"""
|
| 24 |
+
Envoie une notification de rappel aux participants d'un booking.
|
| 25 |
+
|
| 26 |
+
Args:
|
| 27 |
+
booking_id: ID du booking
|
| 28 |
+
time_label: Texte décrivant le temps restant (ex: "30 minutes")
|
| 29 |
+
"""
|
| 30 |
+
from apps.bookings.models import Booking
|
| 31 |
+
from apps.notifications.models import Notification, NotificationTitle, NotificationMessage
|
| 32 |
+
from apps.notifications.services import NotificationService
|
| 33 |
+
|
| 34 |
+
try:
|
| 35 |
+
booking = Booking.objects.get(id=booking_id)
|
| 36 |
+
|
| 37 |
+
# Vérifier que le booking est toujours confirmé
|
| 38 |
+
if booking.status != 'CONFIRMED':
|
| 39 |
+
logger.info(f"Booking {booking_id} n'est plus confirmé, skip reminder.")
|
| 40 |
+
return
|
| 41 |
+
|
| 42 |
+
# Récupérer les noms des participants
|
| 43 |
+
student_profile = booking.student.profiles.filter(is_current=True).first()
|
| 44 |
+
student_name = student_profile.name if student_profile else booking.student.email
|
| 45 |
+
|
| 46 |
+
mentor_profile = booking.mentor.profiles.filter(is_current=True).first()
|
| 47 |
+
mentor_name = mentor_profile.name if mentor_profile else booking.mentor.email
|
| 48 |
+
|
| 49 |
+
# Notification pour l'étudiant
|
| 50 |
+
notif_student = Notification.objects.create(
|
| 51 |
+
user=booking.student,
|
| 52 |
+
type='BOOKING',
|
| 53 |
+
link=f'/chat?partner={booking.mentor.id}'
|
| 54 |
+
)
|
| 55 |
+
NotificationTitle.objects.create(
|
| 56 |
+
notification=notif_student,
|
| 57 |
+
title=f'⏰ Rappel : Rendez-vous dans {time_label}'
|
| 58 |
+
)
|
| 59 |
+
NotificationMessage.objects.create(
|
| 60 |
+
notification=notif_student,
|
| 61 |
+
message=f'Votre session avec {mentor_name} commence dans {time_label}. Préparez-vous !'
|
| 62 |
+
)
|
| 63 |
+
NotificationService._send_ws_notification(notif_student)
|
| 64 |
+
|
| 65 |
+
# Notification pour le mentor
|
| 66 |
+
notif_mentor = Notification.objects.create(
|
| 67 |
+
user=booking.mentor,
|
| 68 |
+
type='BOOKING',
|
| 69 |
+
link=f'/chat?partner={booking.student.id}'
|
| 70 |
+
)
|
| 71 |
+
NotificationTitle.objects.create(
|
| 72 |
+
notification=notif_mentor,
|
| 73 |
+
title=f'⏰ Rappel : Rendez-vous dans {time_label}'
|
| 74 |
+
)
|
| 75 |
+
NotificationMessage.objects.create(
|
| 76 |
+
notification=notif_mentor,
|
| 77 |
+
message=f'Votre session avec {student_name} commence dans {time_label}. L\'étudiant vous attend !'
|
| 78 |
+
)
|
| 79 |
+
NotificationService._send_ws_notification(notif_mentor)
|
| 80 |
+
|
| 81 |
+
logger.info(f"Rappel envoyé pour booking {booking_id} ({time_label})")
|
| 82 |
+
return True
|
| 83 |
+
|
| 84 |
+
except Booking.DoesNotExist:
|
| 85 |
+
logger.error(f"Booking {booking_id} introuvable")
|
| 86 |
+
return False
|
| 87 |
+
except Exception as e:
|
| 88 |
+
logger.error(f"Erreur lors de l'envoi du rappel: {e}")
|
| 89 |
+
raise
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
@shared_task(name='bookings.send_booking_starting_now')
|
| 93 |
+
def send_booking_starting_now(booking_id):
|
| 94 |
+
"""
|
| 95 |
+
Notification spéciale quand le rendez-vous commence.
|
| 96 |
+
"""
|
| 97 |
+
from apps.bookings.models import Booking
|
| 98 |
+
from apps.notifications.models import Notification, NotificationTitle, NotificationMessage
|
| 99 |
+
from apps.notifications.services import NotificationService
|
| 100 |
+
|
| 101 |
+
try:
|
| 102 |
+
booking = Booking.objects.get(id=booking_id)
|
| 103 |
+
|
| 104 |
+
if booking.status != 'CONFIRMED':
|
| 105 |
+
return
|
| 106 |
+
|
| 107 |
+
# Récupérer les noms
|
| 108 |
+
student_profile = booking.student.profiles.filter(is_current=True).first()
|
| 109 |
+
student_name = student_profile.name if student_profile else booking.student.email
|
| 110 |
+
|
| 111 |
+
mentor_profile = booking.mentor.profiles.filter(is_current=True).first()
|
| 112 |
+
mentor_name = mentor_profile.name if mentor_profile else booking.mentor.email
|
| 113 |
+
|
| 114 |
+
# Notification pour l'étudiant
|
| 115 |
+
notif_student = Notification.objects.create(
|
| 116 |
+
user=booking.student,
|
| 117 |
+
type='BOOKING',
|
| 118 |
+
link=f'/chat?partner={booking.mentor.id}'
|
| 119 |
+
)
|
| 120 |
+
NotificationTitle.objects.create(
|
| 121 |
+
notification=notif_student,
|
| 122 |
+
title='🚀 C\'est l\'heure de votre session !'
|
| 123 |
+
)
|
| 124 |
+
NotificationMessage.objects.create(
|
| 125 |
+
notification=notif_student,
|
| 126 |
+
message=f'Votre session de mentorat avec {mentor_name} commence maintenant. Cliquez pour rejoindre le chat.'
|
| 127 |
+
)
|
| 128 |
+
NotificationService._send_ws_notification(notif_student)
|
| 129 |
+
|
| 130 |
+
# Notification pour le mentor
|
| 131 |
+
notif_mentor = Notification.objects.create(
|
| 132 |
+
user=booking.mentor,
|
| 133 |
+
type='BOOKING',
|
| 134 |
+
link=f'/chat?partner={booking.student.id}'
|
| 135 |
+
)
|
| 136 |
+
NotificationTitle.objects.create(
|
| 137 |
+
notification=notif_mentor,
|
| 138 |
+
title='🚀 C\'est l\'heure de votre session !'
|
| 139 |
+
)
|
| 140 |
+
NotificationMessage.objects.create(
|
| 141 |
+
notification=notif_mentor,
|
| 142 |
+
message=f'Votre session avec {student_name} commence maintenant. L\'étudiant vous attend !'
|
| 143 |
+
)
|
| 144 |
+
NotificationService._send_ws_notification(notif_mentor)
|
| 145 |
+
|
| 146 |
+
logger.info(f"Notification de démarrage envoyée pour booking {booking_id}")
|
| 147 |
+
return True
|
| 148 |
+
|
| 149 |
+
except Booking.DoesNotExist:
|
| 150 |
+
logger.error(f"Booking {booking_id} introuvable")
|
| 151 |
+
return False
|
| 152 |
+
except Exception as e:
|
| 153 |
+
logger.error(f"Erreur: {e}")
|
| 154 |
+
raise
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
@shared_task(name='bookings.schedule_all_reminders')
|
| 158 |
+
def schedule_all_reminders(booking_id):
|
| 159 |
+
"""
|
| 160 |
+
Planifie tous les rappels pour un booking.
|
| 161 |
+
À appeler quand un booking est confirmé.
|
| 162 |
+
"""
|
| 163 |
+
from apps.bookings.models import Booking
|
| 164 |
+
|
| 165 |
+
try:
|
| 166 |
+
booking = Booking.objects.get(id=booking_id)
|
| 167 |
+
|
| 168 |
+
if booking.status != 'CONFIRMED':
|
| 169 |
+
return
|
| 170 |
+
|
| 171 |
+
# Calculer l'heure de début
|
| 172 |
+
booking_start = datetime.datetime.combine(booking.date, booking.time)
|
| 173 |
+
booking_start = timezone.make_aware(booking_start)
|
| 174 |
+
now = timezone.now()
|
| 175 |
+
|
| 176 |
+
scheduled_count = 0
|
| 177 |
+
|
| 178 |
+
# Planifier chaque rappel
|
| 179 |
+
for minutes_before, label in REMINDER_INTERVALS:
|
| 180 |
+
reminder_time = booking_start - datetime.timedelta(minutes=minutes_before)
|
| 181 |
+
|
| 182 |
+
# Ne planifier que les rappels dans le futur
|
| 183 |
+
if reminder_time > now:
|
| 184 |
+
send_booking_reminder.apply_async(
|
| 185 |
+
args=[booking_id, label],
|
| 186 |
+
eta=reminder_time
|
| 187 |
+
)
|
| 188 |
+
scheduled_count += 1
|
| 189 |
+
logger.info(f"Rappel planifié pour booking {booking_id}: {label} (à {reminder_time})")
|
| 190 |
+
|
| 191 |
+
# Planifier la notification de démarrage
|
| 192 |
+
if booking_start > now:
|
| 193 |
+
send_booking_starting_now.apply_async(
|
| 194 |
+
args=[booking_id],
|
| 195 |
+
eta=booking_start
|
| 196 |
+
)
|
| 197 |
+
logger.info(f"Notification de démarrage planifiée pour booking {booking_id}")
|
| 198 |
+
|
| 199 |
+
return f"Planifié {scheduled_count} rappels pour booking {booking_id}"
|
| 200 |
+
|
| 201 |
+
except Booking.DoesNotExist:
|
| 202 |
+
logger.error(f"Booking {booking_id} introuvable")
|
| 203 |
+
return None
|
| 204 |
+
except Exception as e:
|
| 205 |
+
logger.error(f"Erreur lors de la planification: {e}")
|
| 206 |
+
raise
|
backend/apps/bookings/views.py
CHANGED
|
@@ -89,7 +89,6 @@ class BookingViewSet(HashIdMixin, viewsets.ModelViewSet):
|
|
| 89 |
|
| 90 |
bookings = Booking.objects.filter(
|
| 91 |
mentor=request.user,
|
| 92 |
-
status='PENDING',
|
| 93 |
is_active=True
|
| 94 |
).order_by('date', 'time')
|
| 95 |
|
|
|
|
| 89 |
|
| 90 |
bookings = Booking.objects.filter(
|
| 91 |
mentor=request.user,
|
|
|
|
| 92 |
is_active=True
|
| 93 |
).order_by('date', 'time')
|
| 94 |
|
backend/apps/mentors/admin.py
CHANGED
|
@@ -43,11 +43,30 @@ from .models import MentorApplication
|
|
| 43 |
|
| 44 |
@admin.register(MentorApplication)
|
| 45 |
class MentorApplicationAdmin(admin.ModelAdmin):
|
| 46 |
-
list_display = ('user', 'status_badge', 'university', 'created_at', 'cv_link')
|
| 47 |
-
list_filter = ('status', 'created_at')
|
| 48 |
search_fields = ('user__email', 'user__first_name', 'user__last_name', 'university')
|
| 49 |
-
readonly_fields = ('created_at', 'updated_at')
|
| 50 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 51 |
def status_badge(self, obj):
|
| 52 |
colors = {
|
| 53 |
'PENDING': 'orange',
|
|
@@ -61,13 +80,33 @@ class MentorApplicationAdmin(admin.ModelAdmin):
|
|
| 61 |
)
|
| 62 |
status_badge.short_description = 'Statut'
|
| 63 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 64 |
def cv_link(self, obj):
|
| 65 |
if obj.cv_file:
|
| 66 |
-
return format_html('<a href="{}" target="_blank" style="background-color: #3b82f6; color: white; padding: 3px 8px; border-radius: 4px; text-decoration: none;">Voir
|
| 67 |
return "-"
|
| 68 |
cv_link.short_description = 'CV'
|
| 69 |
|
| 70 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 71 |
|
| 72 |
@admin.action(description='Approuver les candidatures sélectionnées')
|
| 73 |
def approve_applications(self, request, queryset):
|
|
@@ -79,3 +118,15 @@ class MentorApplicationAdmin(admin.ModelAdmin):
|
|
| 79 |
updated = queryset.update(status='REJECTED')
|
| 80 |
self.message_user(request, f"{updated} candidature(s) rejetée(s).")
|
| 81 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 43 |
|
| 44 |
@admin.register(MentorApplication)
|
| 45 |
class MentorApplicationAdmin(admin.ModelAdmin):
|
| 46 |
+
list_display = ('user', 'status_badge', 'ai_status_badge', 'ai_validated', 'ai_score', 'university', 'created_at', 'cv_link', 'id_card_link')
|
| 47 |
+
list_filter = ('status', 'ai_status', 'ai_validated', 'created_at')
|
| 48 |
search_fields = ('user__email', 'user__first_name', 'user__last_name', 'university')
|
| 49 |
+
readonly_fields = ('created_at', 'updated_at', 'ai_status', 'ai_recommendation', 'ai_score', 'ai_validated')
|
| 50 |
|
| 51 |
+
fieldsets = (
|
| 52 |
+
('Informations Utilisateur', {
|
| 53 |
+
'fields': ('user', 'university', 'bio', 'specialties', 'availability')
|
| 54 |
+
}),
|
| 55 |
+
('Documents', {
|
| 56 |
+
'fields': ('cv_file', 'id_card_photo')
|
| 57 |
+
}),
|
| 58 |
+
('Réseaux Sociaux', {
|
| 59 |
+
'fields': ('linkedin', 'twitter', 'website')
|
| 60 |
+
}),
|
| 61 |
+
('Analyse IA (Automatique)', {
|
| 62 |
+
'fields': ('ai_status', 'ai_validated', 'ai_score', 'ai_recommendation'),
|
| 63 |
+
'description': 'Ces champs sont remplis automatiquement par l\'IA Gemini après la soumission.'
|
| 64 |
+
}),
|
| 65 |
+
('Décision Admin', {
|
| 66 |
+
'fields': ('status', 'created_at', 'updated_at')
|
| 67 |
+
}),
|
| 68 |
+
)
|
| 69 |
+
|
| 70 |
def status_badge(self, obj):
|
| 71 |
colors = {
|
| 72 |
'PENDING': 'orange',
|
|
|
|
| 80 |
)
|
| 81 |
status_badge.short_description = 'Statut'
|
| 82 |
|
| 83 |
+
def ai_status_badge(self, obj):
|
| 84 |
+
colors = {
|
| 85 |
+
'PENDING': '#6b7280', # gray-500
|
| 86 |
+
'PROCESSING': '#3b82f6', # blue-500
|
| 87 |
+
'COMPLETED': '#10b981', # emerald-500
|
| 88 |
+
'FAILED': '#ef4444', # red-500
|
| 89 |
+
}
|
| 90 |
+
return format_html(
|
| 91 |
+
'<span style="color: white; background-color: {}; padding: 2px 8px; border-radius: 8px; font-size: 11px; font-weight: 500;">{}</span>',
|
| 92 |
+
colors.get(obj.ai_status, 'gray'),
|
| 93 |
+
obj.get_ai_status_display()
|
| 94 |
+
)
|
| 95 |
+
ai_status_badge.short_description = 'Analyse IA'
|
| 96 |
+
|
| 97 |
def cv_link(self, obj):
|
| 98 |
if obj.cv_file:
|
| 99 |
+
return format_html('<a href="{}" target="_blank" style="background-color: #3b82f6; color: white; padding: 3px 8px; border-radius: 4px; text-decoration: none; font-size: 11px;">Voir CV</a>', obj.cv_file.url)
|
| 100 |
return "-"
|
| 101 |
cv_link.short_description = 'CV'
|
| 102 |
|
| 103 |
+
def id_card_link(self, obj):
|
| 104 |
+
if obj.id_card_photo:
|
| 105 |
+
return format_html('<a href="{}" target="_blank" style="background-color: #8b5cf6; color: white; padding: 3px 8px; border-radius: 4px; text-decoration: none; font-size: 11px;">Voir ID</a>', obj.id_card_photo.url)
|
| 106 |
+
return "-"
|
| 107 |
+
id_card_link.short_description = 'Photo ID'
|
| 108 |
+
|
| 109 |
+
actions = ['approve_applications', 'reject_applications', 'retry_ai_review']
|
| 110 |
|
| 111 |
@admin.action(description='Approuver les candidatures sélectionnées')
|
| 112 |
def approve_applications(self, request, queryset):
|
|
|
|
| 118 |
updated = queryset.update(status='REJECTED')
|
| 119 |
self.message_user(request, f"{updated} candidature(s) rejetée(s).")
|
| 120 |
|
| 121 |
+
@admin.action(description='Relancer l\'analyse IA')
|
| 122 |
+
def retry_ai_review(self, request, queryset):
|
| 123 |
+
from .services import MentorAIRewiewService
|
| 124 |
+
import threading
|
| 125 |
+
for instance in queryset:
|
| 126 |
+
thread = threading.Thread(
|
| 127 |
+
target=MentorAIRewiewService.review_application,
|
| 128 |
+
args=(instance.id,)
|
| 129 |
+
)
|
| 130 |
+
thread.start()
|
| 131 |
+
self.message_user(request, f"Analyse IA relancée pour {queryset.count()} candidature(s).")
|
| 132 |
+
|
backend/apps/mentors/apps.py
CHANGED
|
@@ -4,3 +4,6 @@ from django.apps import AppConfig
|
|
| 4 |
class MentorsConfig(AppConfig):
|
| 5 |
default_auto_field = 'django.db.models.BigAutoField'
|
| 6 |
name = 'apps.mentors'
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
class MentorsConfig(AppConfig):
|
| 5 |
default_auto_field = 'django.db.models.BigAutoField'
|
| 6 |
name = 'apps.mentors'
|
| 7 |
+
|
| 8 |
+
def ready(self):
|
| 9 |
+
import apps.mentors.signals
|
backend/apps/mentors/migrations/0005_mentorapplication_ai_recommendation_and_more.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated by Django 4.2.7 on 2025-12-22 12:34
|
| 2 |
+
|
| 3 |
+
from django.db import migrations, models
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class Migration(migrations.Migration):
|
| 7 |
+
|
| 8 |
+
dependencies = [
|
| 9 |
+
('mentors', '0004_mentorspecificdateavailability'),
|
| 10 |
+
]
|
| 11 |
+
|
| 12 |
+
operations = [
|
| 13 |
+
migrations.AddField(
|
| 14 |
+
model_name='mentorapplication',
|
| 15 |
+
name='ai_recommendation',
|
| 16 |
+
field=models.TextField(blank=True, verbose_name='Recommandation IA'),
|
| 17 |
+
),
|
| 18 |
+
migrations.AddField(
|
| 19 |
+
model_name='mentorapplication',
|
| 20 |
+
name='ai_score',
|
| 21 |
+
field=models.IntegerField(blank=True, null=True, verbose_name='Score de confiance IA'),
|
| 22 |
+
),
|
| 23 |
+
migrations.AddField(
|
| 24 |
+
model_name='mentorapplication',
|
| 25 |
+
name='ai_status',
|
| 26 |
+
field=models.CharField(choices=[('PENDING', 'En attente'), ('PROCESSING', "En cours d'analyse"), ('COMPLETED', 'Analyse terminée'), ('FAILED', "Échec de l'analyse")], default='PENDING', max_length=20),
|
| 27 |
+
),
|
| 28 |
+
migrations.AddField(
|
| 29 |
+
model_name='mentorapplication',
|
| 30 |
+
name='ai_validated',
|
| 31 |
+
field=models.BooleanField(default=False, verbose_name="Validé par l'IA"),
|
| 32 |
+
),
|
| 33 |
+
migrations.AddField(
|
| 34 |
+
model_name='mentorapplication',
|
| 35 |
+
name='id_card_photo',
|
| 36 |
+
field=models.ImageField(blank=True, null=True, upload_to='mentors/ids/'),
|
| 37 |
+
),
|
| 38 |
+
]
|
backend/apps/mentors/models.py
CHANGED
|
@@ -136,6 +136,21 @@ class MentorApplication(TimestampMixin, SoftDeleteMixin):
|
|
| 136 |
twitter = models.URLField(max_length=500, blank=True)
|
| 137 |
website = models.URLField(max_length=500, blank=True)
|
| 138 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 139 |
class Meta:
|
| 140 |
db_table = 'mentor_applications'
|
| 141 |
ordering = ['-created_at']
|
|
|
|
| 136 |
twitter = models.URLField(max_length=500, blank=True)
|
| 137 |
website = models.URLField(max_length=500, blank=True)
|
| 138 |
|
| 139 |
+
# Nouveau champ : Photo d'Identité
|
| 140 |
+
id_card_photo = models.ImageField(upload_to='mentors/ids/', null=True, blank=True)
|
| 141 |
+
|
| 142 |
+
# Champs pour l'analyse IA
|
| 143 |
+
AI_STATUS_CHOICES = [
|
| 144 |
+
('PENDING', 'En attente'),
|
| 145 |
+
('PROCESSING', 'En cours d\'analyse'),
|
| 146 |
+
('COMPLETED', 'Analyse terminée'),
|
| 147 |
+
('FAILED', 'Échec de l\'analyse'),
|
| 148 |
+
]
|
| 149 |
+
ai_status = models.CharField(max_length=20, choices=AI_STATUS_CHOICES, default='PENDING')
|
| 150 |
+
ai_recommendation = models.TextField(blank=True, verbose_name="Recommandation IA")
|
| 151 |
+
ai_score = models.IntegerField(null=True, blank=True, verbose_name="Score de confiance IA")
|
| 152 |
+
ai_validated = models.BooleanField(default=False, verbose_name="Validé par l'IA")
|
| 153 |
+
|
| 154 |
class Meta:
|
| 155 |
db_table = 'mentor_applications'
|
| 156 |
ordering = ['-created_at']
|
backend/apps/mentors/serializers.py
CHANGED
|
@@ -287,11 +287,15 @@ class MentorApplicationSerializer(serializers.ModelSerializer):
|
|
| 287 |
class Meta:
|
| 288 |
model = MentorApplication
|
| 289 |
fields = [
|
| 290 |
-
'id', 'user', 'cv_file', 'status', 'bio', 'university',
|
| 291 |
'specialties', 'availability', 'linkedin', 'twitter', 'website',
|
|
|
|
| 292 |
'created_at'
|
| 293 |
]
|
| 294 |
-
read_only_fields = [
|
|
|
|
|
|
|
|
|
|
| 295 |
|
| 296 |
def validate_cv_file(self, value):
|
| 297 |
if not value.name.endswith('.pdf'):
|
|
|
|
| 287 |
class Meta:
|
| 288 |
model = MentorApplication
|
| 289 |
fields = [
|
| 290 |
+
'id', 'user', 'cv_file', 'id_card_photo', 'status', 'bio', 'university',
|
| 291 |
'specialties', 'availability', 'linkedin', 'twitter', 'website',
|
| 292 |
+
'ai_status', 'ai_recommendation', 'ai_score', 'ai_validated',
|
| 293 |
'created_at'
|
| 294 |
]
|
| 295 |
+
read_only_fields = [
|
| 296 |
+
'id', 'user', 'status', 'ai_status', 'ai_recommendation',
|
| 297 |
+
'ai_score', 'ai_validated', 'created_at'
|
| 298 |
+
]
|
| 299 |
|
| 300 |
def validate_cv_file(self, value):
|
| 301 |
if not value.name.endswith('.pdf'):
|
backend/apps/mentors/services.py
ADDED
|
@@ -0,0 +1,122 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================
|
| 2 |
+
# apps/mentors/services.py - Services Mentors
|
| 3 |
+
# ============================================
|
| 4 |
+
import os
|
| 5 |
+
import logging
|
| 6 |
+
import json
|
| 7 |
+
import re
|
| 8 |
+
from pypdf import PdfReader
|
| 9 |
+
from google import genai
|
| 10 |
+
from django.conf import settings
|
| 11 |
+
from .models import MentorApplication
|
| 12 |
+
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
+
class MentorAIRewiewService:
    """AI review of mentor applications using Google Gemini.

    Extracts the applicant's CV text from the uploaded PDF, prompts
    Gemini for a structured assessment, and persists the resulting
    score / recommendation / validation flags on the MentorApplication.

    NOTE(review): the class name keeps the original "Rewiew" misspelling
    because other modules (e.g. signals.py) import it under this exact name.
    """

    @staticmethod
    def extract_text_from_pdf(pdf_path):
        """Return the concatenated text of all pages of a PDF file.

        Never raises: returns "" when the file is missing or unreadable,
        so the review can proceed with whatever data is available.
        """
        try:
            if not os.path.exists(pdf_path):
                logger.error(f"Fichier PDF non trouvé: {pdf_path}")
                return ""

            reader = PdfReader(pdf_path)
            text = ""
            for page in reader.pages:
                # extract_text() may yield None/"" on image-only pages;
                # coalesce to "" so the concatenation never fails.
                text += (page.extract_text() or "") + "\n"
            return text
        except Exception as e:
            logger.error(f"Erreur lors de l'extraction du PDF: {str(e)}")
            return ""

    @staticmethod
    def review_application(application_id):
        """Run the AI review for one application.

        Marks the application PROCESSING, calls Gemini, then stores the
        parsed JSON result (score / recommendation / validated) with a
        final ai_status of COMPLETED or FAILED.

        Returns True on success, False on any error (the error is also
        recorded on the application, best effort).
        """
        try:
            application = MentorApplication.objects.get(id=application_id)
            application.ai_status = 'PROCESSING'
            application.save(update_fields=['ai_status'])

            # Extract CV text — best effort, may be empty.
            cv_text = ""
            if application.cv_file:
                cv_text = MentorAIRewiewService.extract_text_from_pdf(application.cv_file.path)

            # Resolve the applicant's display name from the current profile.
            user_name = "Inconnu"
            profile = application.user.profiles.filter(is_current=True).first()
            if profile:
                user_name = profile.name

            # Build the prompt; CV text truncated to bound token usage.
            prompt = f"""
            Tu es un expert en recrutement académique pour EduLab Africa.
            Ta mission est d'analyser la candidature d'un utilisateur qui souhaite devenir Mentor sur notre plateforme.

            Voici les informations de la candidature :
            - Nom : {user_name}
            - Université : {application.university}
            - Spécialités : {', '.join(application.specialties)}
            - Biographie : {application.bio}
            - LinkedIn : {application.linkedin}

            Texte extrait du CV (PDF) :
            ---
            {cv_text[:4000]}
            ---

            Analyse cette candidature selon les critères suivants :
            1. Expertise technique dans les spécialités mentionnées.
            2. Expérience académique ou professionnelle pertinente.
            3. Capacité à transmettre des connaissances (pédagogie).
            4. Cohérence globale du profil.

            Réponds UNIQUEMENT au format JSON suivant (sans texte avant ou après) :
            {{
                "score": (un entier entre 0 et 100),
                "recommendation": "Un résumé détaillé de ton analyse en français et pourquoi tu valides ou non le profil.",
                "validated": (true ou false)
            }}
            """

            # Call Gemini through the google-genai SDK (same pattern as Tutor AI).
            if not settings.GEMINI_API_KEY:
                raise ValueError("GEMINI_API_KEY non configurée dans les settings.")

            client = genai.Client(api_key=settings.GEMINI_API_KEY)

            response = client.models.generate_content(
                model='gemini-flash-latest',
                contents=prompt
            )

            # The model sometimes wraps the JSON in prose or code fences:
            # grab the outermost {...} span before parsing.
            text_response = response.text
            json_match = re.search(r'\{.*\}', text_response, re.DOTALL)

            if json_match:
                result = json.loads(json_match.group())

                application.ai_score = result.get('score', 0)
                application.ai_recommendation = result.get('recommendation', '')
                application.ai_validated = result.get('validated', False)
                application.ai_status = 'COMPLETED'
            else:
                application.ai_status = 'FAILED'
                application.ai_recommendation = "L'IA n'a pas renvoyé un format JSON valide."

            application.save()
            return True

        except Exception as e:
            logger.error(f"Erreur lors de l'analyse IA: {str(e)}")
            try:
                # Re-fetch to avoid persisting stale in-memory state.
                app = MentorApplication.objects.get(id=application_id)
                app.ai_status = 'FAILED'
                app.ai_recommendation = f"Erreur technique: {str(e)}"
                app.save()
            except Exception:
                # Was a bare `except:` — narrowed so SystemExit /
                # KeyboardInterrupt are no longer swallowed. Recording
                # the failure stays best-effort.
                pass
            return False
|
backend/apps/mentors/signals.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from django.db.models.signals import post_save
|
| 2 |
+
from django.dispatch import receiver
|
| 3 |
+
from .models import MentorApplication
|
| 4 |
+
from .services import MentorAIRewiewService
|
| 5 |
+
import threading
|
| 6 |
+
|
| 7 |
+
@receiver(post_save, sender=MentorApplication)
def trigger_ai_review(sender, instance, created, **kwargs):
    """Kick off the AI review as soon as a new application is submitted.

    The review runs in a separate thread so the HTTP response is not
    blocked (a Celery task would be the production-grade choice).
    """
    if not created:
        return
    threading.Thread(
        target=MentorAIRewiewService.review_application,
        args=(instance.id,),
    ).start()
|
backend/apps/messaging/consumers.py
CHANGED
|
@@ -2,6 +2,7 @@
|
|
| 2 |
# apps/messaging/consumers.py - WebSocket Consumer
|
| 3 |
# ============================================
|
| 4 |
import json
|
|
|
|
| 5 |
from channels.generic.websocket import AsyncWebsocketConsumer
|
| 6 |
from channels.db import database_sync_to_async
|
| 7 |
from apps.messaging.models import Message, MessageContent, Conversation
|
|
@@ -26,6 +27,9 @@ class ChatConsumer(AsyncWebsocketConsumer):
|
|
| 26 |
await self.close()
|
| 27 |
return
|
| 28 |
|
|
|
|
|
|
|
|
|
|
| 29 |
# Rejoindre le groupe
|
| 30 |
await self.channel_layer.group_add(
|
| 31 |
self.room_group_name,
|
|
@@ -36,10 +40,11 @@ class ChatConsumer(AsyncWebsocketConsumer):
|
|
| 36 |
|
| 37 |
async def disconnect(self, close_code):
|
| 38 |
# Quitter le groupe
|
| 39 |
-
|
| 40 |
-
self.
|
| 41 |
-
|
| 42 |
-
|
|
|
|
| 43 |
|
| 44 |
async def receive(self, text_data):
|
| 45 |
"""Recevoir un message du WebSocket"""
|
|
@@ -47,31 +52,71 @@ class ChatConsumer(AsyncWebsocketConsumer):
|
|
| 47 |
message_content = data.get('message')
|
| 48 |
user = self.scope['user']
|
| 49 |
|
| 50 |
-
# Sauvegarder en base de données
|
| 51 |
-
message = await self.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 52 |
|
| 53 |
-
# Envoyer au groupe
|
| 54 |
await self.channel_layer.group_send(
|
| 55 |
self.room_group_name,
|
| 56 |
{
|
| 57 |
'type': 'chat_message',
|
| 58 |
-
'message':
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
'sender': {
|
| 62 |
-
'id': user.id,
|
| 63 |
-
'email': user.email
|
| 64 |
-
},
|
| 65 |
-
'timestamp': message.created_at.isoformat()
|
| 66 |
-
}
|
| 67 |
}
|
| 68 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 69 |
|
| 70 |
async def chat_message(self, event):
|
| 71 |
-
"""Envoyer le message au WebSocket"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 72 |
await self.send(text_data=json.dumps({
|
| 73 |
-
'
|
|
|
|
| 74 |
}))
|
|
|
|
| 75 |
|
| 76 |
@database_sync_to_async
|
| 77 |
def is_participant(self, user, conversation_id):
|
|
@@ -86,15 +131,45 @@ class ChatConsumer(AsyncWebsocketConsumer):
|
|
| 86 |
return False
|
| 87 |
|
| 88 |
@database_sync_to_async
|
| 89 |
-
def
|
| 90 |
-
"""Sauvegarder le message
|
| 91 |
from django.utils import timezone
|
|
|
|
|
|
|
| 92 |
|
| 93 |
conversation = Conversation.objects.get(id=conversation_id)
|
| 94 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 95 |
message = Message.objects.create(
|
| 96 |
conversation=conversation,
|
| 97 |
-
sender=user
|
|
|
|
| 98 |
)
|
| 99 |
|
| 100 |
MessageContent.objects.create(
|
|
@@ -106,4 +181,4 @@ class ChatConsumer(AsyncWebsocketConsumer):
|
|
| 106 |
conversation.last_message_at = timezone.now()
|
| 107 |
conversation.save()
|
| 108 |
|
| 109 |
-
return message
|
|
|
|
| 2 |
# apps/messaging/consumers.py - WebSocket Consumer
|
| 3 |
# ============================================
|
| 4 |
import json
|
| 5 |
+
import datetime
|
| 6 |
from channels.generic.websocket import AsyncWebsocketConsumer
|
| 7 |
from channels.db import database_sync_to_async
|
| 8 |
from apps.messaging.models import Message, MessageContent, Conversation
|
|
|
|
| 27 |
await self.close()
|
| 28 |
return
|
| 29 |
|
| 30 |
+
# Stocker l'user_id pour filtrage ultérieur
|
| 31 |
+
self.user_id = user.id
|
| 32 |
+
|
| 33 |
# Rejoindre le groupe
|
| 34 |
await self.channel_layer.group_add(
|
| 35 |
self.room_group_name,
|
|
|
|
| 40 |
|
| 41 |
async def disconnect(self, close_code):
    """Leave the conversation group when the socket closes.

    `room_group_name` is only set when connect() succeeded, so a
    guarded lookup avoids an AttributeError on rejected connections.
    """
    group = getattr(self, 'room_group_name', None)
    if group is not None:
        await self.channel_layer.group_discard(group, self.channel_name)
|
| 48 |
|
| 49 |
async def receive(self, text_data):
|
| 50 |
"""Recevoir un message du WebSocket"""
|
|
|
|
| 52 |
message_content = data.get('message')
|
| 53 |
user = self.scope['user']
|
| 54 |
|
| 55 |
+
# Sauvegarder en base de données avec vérification du booking
|
| 56 |
+
message, is_visible, other_user_id = await self.save_message_with_visibility(
|
| 57 |
+
user, self.conversation_id, message_content
|
| 58 |
+
)
|
| 59 |
+
|
| 60 |
+
# Préparer les données du message
|
| 61 |
+
msg_data = {
|
| 62 |
+
'id': message.id,
|
| 63 |
+
'content': message_content,
|
| 64 |
+
'sender': {
|
| 65 |
+
'id': user.id,
|
| 66 |
+
'email': user.email
|
| 67 |
+
},
|
| 68 |
+
'timestamp': message.created_at.isoformat(),
|
| 69 |
+
'is_visible_to_recipient': is_visible
|
| 70 |
+
}
|
| 71 |
|
| 72 |
+
# Envoyer au groupe avec les infos de visibilité
|
| 73 |
await self.channel_layer.group_send(
|
| 74 |
self.room_group_name,
|
| 75 |
{
|
| 76 |
'type': 'chat_message',
|
| 77 |
+
'message': msg_data,
|
| 78 |
+
'sender_id': user.id,
|
| 79 |
+
'is_visible': is_visible
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 80 |
}
|
| 81 |
)
|
| 82 |
+
|
| 83 |
+
# Si le message n'est pas visible, notifier l'expéditeur directement
|
| 84 |
+
if not is_visible:
|
| 85 |
+
await self.send(text_data=json.dumps({
|
| 86 |
+
'type': 'message_queued',
|
| 87 |
+
'message': "Votre message sera délivré au destinataire lors de votre prochain rendez-vous.",
|
| 88 |
+
'message_id': message.id
|
| 89 |
+
}))
|
| 90 |
|
| 91 |
async def chat_message(self, event):
    """Relay a group-broadcast chat message to this WebSocket.

    Delivery rules:
    - the sender always receives an echo of their own message;
    - other participants receive it only when it is marked visible.
    Hidden (queued) messages are simply withheld from the recipient.
    """
    sender_id = event.get('sender_id')
    visible = event.get('is_visible', True)

    # Single delivery condition covers both original branches
    # (sender echo OR visible-to-recipient) — same behavior.
    if sender_id == self.user_id or visible:
        await self.send(text_data=json.dumps({
            'message': event['message']
        }))
|
| 108 |
+
|
| 109 |
+
async def messages_unlocked(self, event):
    """Notify the client that queued messages became visible.

    The frontend is expected to reload the conversation to fetch the
    newly unlocked messages.
    """
    await self.send(text_data=json.dumps({
        'type': 'messages_unlocked',
        'count': event.get('count', 0),
    }))
|
| 119 |
+
|
| 120 |
|
| 121 |
@database_sync_to_async
|
| 122 |
def is_participant(self, user, conversation_id):
|
|
|
|
| 131 |
return False
|
| 132 |
|
| 133 |
@database_sync_to_async
|
| 134 |
+
def save_message_with_visibility(self, user, conversation_id, content):
|
| 135 |
+
"""Sauvegarder le message avec vérification du booking actif"""
|
| 136 |
from django.utils import timezone
|
| 137 |
+
from apps.bookings.models import Booking
|
| 138 |
+
from django.db.models import Q
|
| 139 |
|
| 140 |
conversation = Conversation.objects.get(id=conversation_id)
|
| 141 |
|
| 142 |
+
# Trouver l'autre participant
|
| 143 |
+
other_participant = conversation.participants.exclude(user=user).filter(is_active=True).first()
|
| 144 |
+
other_user = other_participant.user if other_participant else None
|
| 145 |
+
other_user_id = other_user.id if other_user else None
|
| 146 |
+
|
| 147 |
+
# Vérifier s'il y a un booking actif
|
| 148 |
+
is_visible = False
|
| 149 |
+
if other_user:
|
| 150 |
+
now = timezone.now()
|
| 151 |
+
|
| 152 |
+
# Chercher un booking CONFIRMED en cours
|
| 153 |
+
active_booking = Booking.objects.filter(
|
| 154 |
+
Q(student=user, mentor=other_user) | Q(student=other_user, mentor=user),
|
| 155 |
+
status='CONFIRMED',
|
| 156 |
+
date=now.date(),
|
| 157 |
+
time__lte=now.time()
|
| 158 |
+
).first()
|
| 159 |
+
|
| 160 |
+
if active_booking:
|
| 161 |
+
# Vérifier si on est encore dans le créneau (2h max)
|
| 162 |
+
booking_start = datetime.datetime.combine(now.date(), active_booking.time)
|
| 163 |
+
booking_start = timezone.make_aware(booking_start)
|
| 164 |
+
|
| 165 |
+
if now <= booking_start + datetime.timedelta(hours=2):
|
| 166 |
+
is_visible = True
|
| 167 |
+
|
| 168 |
+
# Créer le message avec la visibilité appropriée
|
| 169 |
message = Message.objects.create(
|
| 170 |
conversation=conversation,
|
| 171 |
+
sender=user,
|
| 172 |
+
is_visible_to_recipient=is_visible
|
| 173 |
)
|
| 174 |
|
| 175 |
MessageContent.objects.create(
|
|
|
|
| 181 |
conversation.last_message_at = timezone.now()
|
| 182 |
conversation.save()
|
| 183 |
|
| 184 |
+
return message, is_visible, other_user_id
|
backend/apps/messaging/serializers.py
CHANGED
|
@@ -50,7 +50,30 @@ class ConversationSerializer(serializers.ModelSerializer):
|
|
| 50 |
return UserSerializer([p.user for p in parts], many=True).data
|
| 51 |
|
| 52 |
def get_last_message(self, obj):
|
| 53 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 54 |
return MessageSerializer(last_msg).data if last_msg else None
|
| 55 |
|
| 56 |
def get_unread_count(self, obj):
|
|
@@ -61,9 +84,21 @@ class ConversationSerializer(serializers.ModelSerializer):
|
|
| 61 |
is_active=True
|
| 62 |
).first()
|
| 63 |
if participant:
|
| 64 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 65 |
return 0
|
| 66 |
|
|
|
|
| 67 |
class MessageCreateSerializer(serializers.Serializer):
|
| 68 |
content = serializers.CharField(required=False, allow_blank=True)
|
| 69 |
attachments = serializers.ListField(
|
|
|
|
| 50 |
return UserSerializer([p.user for p in parts], many=True).data
|
| 51 |
|
| 52 |
def get_last_message(self, obj):
    """Return the latest message VISIBLE to the current user.

    Visibility rules:
    - the sender always sees their own messages;
    - other users only see messages with is_visible_to_recipient=True.
    Without an authenticated request context, only publicly-visible
    messages are considered.
    """
    from django.db.models import Q

    request = self.context.get('request')
    if not request or not request.user.is_authenticated:
        # No user context: latest visible message only.
        # Fix: explicit ordering — the previous `.last()` depended on
        # the model's default Meta ordering (undefined without one),
        # and was inconsistent with the authenticated branch below.
        last_msg = obj.messages.filter(
            is_active=True,
            is_visible_to_recipient=True
        ).order_by('-created_at').first()
        return MessageSerializer(last_msg).data if last_msg else None

    # Messages the user may see:
    # 1. messages they sent (sender=me)
    # 2. messages already delivered (is_visible_to_recipient=True)
    last_msg = obj.messages.filter(is_active=True).filter(
        Q(sender=request.user) | Q(is_visible_to_recipient=True)
    ).order_by('-created_at').first()
    return MessageSerializer(last_msg).data if last_msg else None
|
| 78 |
|
| 79 |
def get_unread_count(self, obj):
|
|
|
|
| 84 |
is_active=True
|
| 85 |
).first()
|
| 86 |
if participant:
|
| 87 |
+
# Compter uniquement les messages non lus ET visibles
|
| 88 |
+
from django.db.models import Q
|
| 89 |
+
if not participant.last_read_at:
|
| 90 |
+
return obj.messages.filter(
|
| 91 |
+
is_active=True,
|
| 92 |
+
is_visible_to_recipient=True
|
| 93 |
+
).exclude(sender=request.user).count()
|
| 94 |
+
return obj.messages.filter(
|
| 95 |
+
is_active=True,
|
| 96 |
+
is_visible_to_recipient=True,
|
| 97 |
+
created_at__gt=participant.last_read_at
|
| 98 |
+
).exclude(sender=request.user).count()
|
| 99 |
return 0
|
| 100 |
|
| 101 |
+
|
| 102 |
class MessageCreateSerializer(serializers.Serializer):
|
| 103 |
content = serializers.CharField(required=False, allow_blank=True)
|
| 104 |
attachments = serializers.ListField(
|
backend/apps/messaging/tasks.py
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================
|
| 2 |
+
# apps/messaging/tasks.py - Tâches Celery pour la messagerie
|
| 3 |
+
# ============================================
|
| 4 |
+
import datetime
|
| 5 |
+
from celery import shared_task
|
| 6 |
+
from django.utils import timezone
|
| 7 |
+
from django.db.models import Q
|
| 8 |
+
import logging
|
| 9 |
+
|
| 10 |
+
logger = logging.getLogger(__name__)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@shared_task(name='messaging.unlock_messages_for_booking')
def unlock_messages_for_booking(booking_id):
    """Unlock all queued messages between two users when their booking starts.

    Scheduled (via a one-off PeriodicTask) to run at the booking's start
    time. Returns the number of messages unlocked, 0 when the booking is
    missing, or None when the booking is no longer confirmed / no
    conversation exists between the pair.
    """
    # Local imports keep app loading order safe for Celery workers.
    from apps.bookings.models import Booking
    from apps.messaging.models import Conversation, Message

    try:
        booking = Booking.objects.get(id=booking_id)

        # Skip silently if the booking was cancelled/changed since scheduling.
        if booking.status != 'CONFIRMED':
            logger.info(f"Booking {booking_id} n'est plus confirmé, skip.")
            return

        student = booking.student
        mentor = booking.mentor

        # Find the active conversation shared by BOTH participants
        # (two chained filters so each condition matches a participant row).
        conversation = Conversation.objects.filter(
            participants__user=student,
            participants__is_active=True,
            is_active=True
        ).filter(
            participants__user=mentor,
            participants__is_active=True
        ).first()

        if not conversation:
            logger.info(f"Pas de conversation entre {student.id} et {mentor.id}")
            return

        # Bulk-unlock every pending message in this conversation.
        # NOTE(review): .update() bypasses model save() and signals — assumed
        # intentional for a bulk flip; confirm nothing hooks Message.save().
        updated_count = Message.objects.filter(
            conversation=conversation,
            is_visible_to_recipient=False,
            is_active=True
        ).update(is_visible_to_recipient=True)

        logger.info(f"Débloqué {updated_count} messages pour booking {booking_id}")

        # Best-effort WebSocket notification so connected clients refresh;
        # failure here must not fail the unlock itself.
        try:
            from channels.layers import get_channel_layer
            from asgiref.sync import async_to_sync

            channel_layer = get_channel_layer()

            async_to_sync(channel_layer.group_send)(
                f'chat_{conversation.id}',
                {
                    'type': 'messages_unlocked',
                    'count': updated_count
                }
            )
        except Exception as e:
            logger.warning(f"Impossible de notifier via WebSocket: {e}")

        return updated_count

    except Booking.DoesNotExist:
        logger.error(f"Booking {booking_id} introuvable")
        return 0
    except Exception as e:
        # Re-raise so Celery records the failure (and can retry if configured).
        logger.error(f"Erreur lors du déblocage des messages: {e}")
        raise
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
@shared_task(name='messaging.schedule_unlock_for_upcoming_bookings')
def schedule_unlock_for_upcoming_bookings():
    """Periodic task: schedule message-unlock jobs for imminent bookings.

    Scans today's CONFIRMED bookings starting within the next 15 minutes
    and creates a one-off django-celery-beat PeriodicTask (per booking)
    that fires `messaging.unlock_messages_for_booking` at the exact start
    time. Intended to run every 5-10 minutes via Celery Beat.

    Returns a short summary string for task-result inspection.
    """
    from apps.bookings.models import Booking
    from django_celery_beat.models import PeriodicTask, ClockedSchedule
    import json

    now = timezone.now()

    # Today's confirmed bookings that have not started yet.
    # NOTE(review): `time__gte=now.time()` excludes bookings that started
    # seconds ago; the check_pending_messages safety net covers those.
    upcoming_bookings = Booking.objects.filter(
        status='CONFIRMED',
        date=now.date(),
        time__gte=now.time(),
        is_active=True
    )

    for booking in upcoming_bookings:
        # Reconstruct the aware start datetime from (date, time).
        booking_start = datetime.datetime.combine(booking.date, booking.time)
        booking_start = timezone.make_aware(booking_start)

        # Only schedule when the booking starts within 15 minutes.
        if booking_start <= now + datetime.timedelta(minutes=15):
            # Deterministic name makes the scheduling idempotent.
            task_name = f'unlock_msgs_booking_{booking.id}'

            if not PeriodicTask.objects.filter(name=task_name).exists():
                try:
                    # Clocked schedule = "run once at this exact instant".
                    clocked, _ = ClockedSchedule.objects.get_or_create(
                        clocked_time=booking_start
                    )

                    PeriodicTask.objects.create(
                        clocked=clocked,
                        name=task_name,
                        task='messaging.unlock_messages_for_booking',
                        args=json.dumps([booking.id]),
                        one_off=True,  # runs exactly once
                        enabled=True
                    )

                    logger.info(f"Tâche planifiée pour booking {booking.id} à {booking_start}")
                except Exception as e:
                    # One failed booking must not abort the whole sweep.
                    logger.error(f"Erreur lors de la planification pour booking {booking.id}: {e}")

    return f"Vérifié {upcoming_bookings.count()} bookings"
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
@shared_task(name='messaging.check_pending_messages')
def check_pending_messages():
    """Safety-net periodic task: unlock messages missed by scheduling.

    Re-checks every still-hidden message and unlocks it when a CONFIRMED
    booking between sender and recipient has already started (or is in
    the past). Covers server restarts or dropped one-off tasks.

    Returns the number of messages unlocked.
    """
    from apps.messaging.models import Message
    from apps.bookings.models import Booking

    now = timezone.now()

    # All hidden, active messages; select_related avoids two extra
    # queries per message in the loop below.
    pending_messages = Message.objects.filter(
        is_visible_to_recipient=False,
        is_active=True
    ).select_related('conversation', 'sender')

    unlocked_total = 0

    for message in pending_messages:
        conversation = message.conversation
        sender = message.sender

        # The recipient is the other active participant of the conversation
        # (assumes 1:1 conversations — a group chat would need more logic).
        recipient_part = conversation.participants.exclude(user=sender).filter(is_active=True).first()
        if not recipient_part:
            continue

        recipient = recipient_part.user

        # Any confirmed booking between the pair that started on a past day,
        # or earlier today.
        booking = Booking.objects.filter(
            Q(student=sender, mentor=recipient) | Q(student=recipient, mentor=sender),
            status='CONFIRMED',
            is_active=True
        ).filter(
            Q(date__lt=now.date()) |
            Q(date=now.date(), time__lte=now.time())
        ).first()

        if booking:
            # Per-instance save (not bulk update) so save()/signals still run.
            message.is_visible_to_recipient = True
            message.save(update_fields=['is_visible_to_recipient'])
            unlocked_total += 1

    if unlocked_total > 0:
        logger.info(f"Check périodique: débloqué {unlocked_total} messages")

    return unlocked_total
|
backend/apps/messaging/views.py
CHANGED
|
@@ -201,27 +201,13 @@ class ConversationViewSet(HashIdMixin, viewsets.ModelViewSet):
|
|
| 201 |
)
|
| 202 |
serializer.is_valid(raise_exception=True)
|
| 203 |
|
| 204 |
-
# On surcharge la méthode create du serializer pour gérer ce champ ?
|
| 205 |
-
# Non, MessageCreateSerializer.create fait le create.
|
| 206 |
-
# On va modifier MessageCreateSerializer pour accepter un argument extra ou on update après.
|
| 207 |
-
# Update après est moins performant (2 DB calls) mais plus simple sans toucher au serializer complexe.
|
| 208 |
-
|
| 209 |
message = serializer.save()
|
| 210 |
|
| 211 |
if is_visible:
|
| 212 |
message.is_visible_to_recipient = True
|
| 213 |
message.save(update_fields=['is_visible_to_recipient'])
|
| 214 |
|
| 215 |
-
# Broadcast to WebSocket
|
| 216 |
-
# IMPORTANT: Si le message est caché, faut-il l'envoyer par WS ?
|
| 217 |
-
# OUI, mais le frontend du destinataire doit savoir qu'il est caché (ou ne pas l'afficher).
|
| 218 |
-
# MAIS si on l'envoie, le destinataire reçoit la data. C'est "envoyé mais pas vu".
|
| 219 |
-
# Le user a dit "le destinataire ne le voit pas".
|
| 220 |
-
# Si on l'envoie par WS, le JS le reçoit.
|
| 221 |
-
# On devrait peut-être NE PAS l'envoyer au destinataire via WS si caché.
|
| 222 |
-
# Ou l'envoyer avec un flag "hidden" et le front ne l'affiche pas ?
|
| 223 |
-
# Sécurité: Mieux vaut ne pas l'envoyer au destinataire.
|
| 224 |
-
|
| 225 |
try:
|
| 226 |
from channels.layers import get_channel_layer
|
| 227 |
from asgiref.sync import async_to_sync
|
|
@@ -230,26 +216,28 @@ class ConversationViewSet(HashIdMixin, viewsets.ModelViewSet):
|
|
| 230 |
|
| 231 |
msg_data = MessageSerializer(message).data
|
| 232 |
|
| 233 |
-
# Envoyer au groupe du chat
|
| 234 |
-
# Problème: le groupe inclut les deux users.
|
| 235 |
-
# Si on envoie, les deux reçoivent.
|
| 236 |
-
# On ne peut pas filtrer par destinataire facilement ici sauf si on a des groupes par user.
|
| 237 |
-
# Solution: Envoyer le message avec le champ 'is_visible_to_recipient'.
|
| 238 |
-
# Le frontend devra filtrer. C'est acceptable car ce n'est pas une donnée ultra sensible, juste une règle métier.
|
| 239 |
-
# Et le serializer inclut déjà 'is_visible_to_recipient' ? Non, il faut l'ajouter au serializer.
|
| 240 |
-
|
| 241 |
async_to_sync(channel_layer.group_send)(
|
| 242 |
f'chat_{conversation.id}',
|
| 243 |
{
|
| 244 |
'type': 'chat_message',
|
| 245 |
-
'message': msg_data
|
|
|
|
|
|
|
| 246 |
}
|
| 247 |
)
|
| 248 |
except Exception as e:
|
| 249 |
pass
|
| 250 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 251 |
return Response(
|
| 252 |
-
|
| 253 |
status=status.HTTP_201_CREATED
|
| 254 |
)
|
| 255 |
|
|
|
|
| 201 |
)
|
| 202 |
serializer.is_valid(raise_exception=True)
|
| 203 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 204 |
message = serializer.save()
|
| 205 |
|
| 206 |
if is_visible:
|
| 207 |
message.is_visible_to_recipient = True
|
| 208 |
message.save(update_fields=['is_visible_to_recipient'])
|
| 209 |
|
| 210 |
+
# Broadcast to WebSocket avec les infos de visibilité
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 211 |
try:
|
| 212 |
from channels.layers import get_channel_layer
|
| 213 |
from asgiref.sync import async_to_sync
|
|
|
|
| 216 |
|
| 217 |
msg_data = MessageSerializer(message).data
|
| 218 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 219 |
async_to_sync(channel_layer.group_send)(
|
| 220 |
f'chat_{conversation.id}',
|
| 221 |
{
|
| 222 |
'type': 'chat_message',
|
| 223 |
+
'message': msg_data,
|
| 224 |
+
'sender_id': request.user.id,
|
| 225 |
+
'is_visible': is_visible
|
| 226 |
}
|
| 227 |
)
|
| 228 |
except Exception as e:
|
| 229 |
pass
|
| 230 |
|
| 231 |
+
# Préparer la réponse
|
| 232 |
+
response_data = MessageSerializer(message).data
|
| 233 |
+
|
| 234 |
+
# Ajouter un avertissement si le message est en attente
|
| 235 |
+
if not is_visible:
|
| 236 |
+
response_data['queued'] = True
|
| 237 |
+
response_data['queued_message'] = "Votre message sera délivré au destinataire lors de votre prochain rendez-vous."
|
| 238 |
+
|
| 239 |
return Response(
|
| 240 |
+
response_data,
|
| 241 |
status=status.HTTP_201_CREATED
|
| 242 |
)
|
| 243 |
|
backend/apps/opportunities/views.py
CHANGED
|
@@ -5,14 +5,14 @@ from rest_framework import viewsets, filters
|
|
| 5 |
from django_filters.rest_framework import DjangoFilterBackend
|
| 6 |
from rest_framework.decorators import action
|
| 7 |
from rest_framework.response import Response
|
| 8 |
-
from rest_framework.permissions import IsAuthenticated
|
| 9 |
|
| 10 |
from apps.opportunities.models import Opportunity, OpportunityView
|
| 11 |
from apps.opportunities.serializers import OpportunitySerializer
|
| 12 |
|
| 13 |
class OpportunityViewSet(viewsets.ReadOnlyModelViewSet):
|
| 14 |
"""Liste des opportunités"""
|
| 15 |
-
permission_classes = [
|
| 16 |
queryset = Opportunity.objects.filter(is_active=True)
|
| 17 |
serializer_class = OpportunitySerializer
|
| 18 |
filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
|
|
|
|
| 5 |
from django_filters.rest_framework import DjangoFilterBackend
|
| 6 |
from rest_framework.decorators import action
|
| 7 |
from rest_framework.response import Response
|
| 8 |
+
from rest_framework.permissions import IsAuthenticated, AllowAny
|
| 9 |
|
| 10 |
from apps.opportunities.models import Opportunity, OpportunityView
|
| 11 |
from apps.opportunities.serializers import OpportunitySerializer
|
| 12 |
|
| 13 |
class OpportunityViewSet(viewsets.ReadOnlyModelViewSet):
|
| 14 |
"""Liste des opportunités"""
|
| 15 |
+
permission_classes = [AllowAny]
|
| 16 |
queryset = Opportunity.objects.filter(is_active=True)
|
| 17 |
serializer_class = OpportunitySerializer
|
| 18 |
filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
|
backend/apps/users/apps.py
CHANGED
|
@@ -4,3 +4,6 @@ from django.apps import AppConfig
|
|
| 4 |
class UsersConfig(AppConfig):
|
| 5 |
default_auto_field = 'django.db.models.BigAutoField'
|
| 6 |
name = 'apps.users'
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
class UsersConfig(AppConfig):
|
| 5 |
default_auto_field = 'django.db.models.BigAutoField'
|
| 6 |
name = 'apps.users'
|
| 7 |
+
|
| 8 |
+
def ready(self):
|
| 9 |
+
import apps.users.signals # noqa
|
backend/apps/users/encryption.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================
|
| 2 |
+
# apps/users/encryption.py - Service de chiffrement
|
| 3 |
+
# ============================================
|
| 4 |
+
from cryptography.hazmat.primitives import serialization
|
| 5 |
+
from cryptography.hazmat.primitives.asymmetric import rsa
|
| 6 |
+
from cryptography.hazmat.backends import default_backend
|
| 7 |
+
import logging
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def generate_rsa_keypair():
|
| 13 |
+
"""
|
| 14 |
+
Génère une paire de clés RSA 2048 bits.
|
| 15 |
+
Retourne un tuple (public_key_pem, private_key_pem)
|
| 16 |
+
"""
|
| 17 |
+
try:
|
| 18 |
+
# Générer la clé privée
|
| 19 |
+
private_key = rsa.generate_private_key(
|
| 20 |
+
public_exponent=65537,
|
| 21 |
+
key_size=2048,
|
| 22 |
+
backend=default_backend()
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
# Convertir la clé privée en PEM
|
| 26 |
+
private_key_pem = private_key.private_bytes(
|
| 27 |
+
encoding=serialization.Encoding.PEM,
|
| 28 |
+
format=serialization.PrivateFormat.PKCS8,
|
| 29 |
+
encryption_algorithm=serialization.NoEncryption()
|
| 30 |
+
).decode('utf-8')
|
| 31 |
+
|
| 32 |
+
# Extraire la clé publique et la convertir en PEM
|
| 33 |
+
public_key = private_key.public_key()
|
| 34 |
+
public_key_pem = public_key.public_bytes(
|
| 35 |
+
encoding=serialization.Encoding.PEM,
|
| 36 |
+
format=serialization.PublicFormat.SubjectPublicKeyInfo
|
| 37 |
+
).decode('utf-8')
|
| 38 |
+
|
| 39 |
+
return public_key_pem, private_key_pem
|
| 40 |
+
|
| 41 |
+
except Exception as e:
|
| 42 |
+
logger.error(f"Erreur lors de la génération des clés RSA: {e}")
|
| 43 |
+
return None, None
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def ensure_user_has_keys(user_profile):
|
| 47 |
+
"""
|
| 48 |
+
S'assure qu'un profil utilisateur a des clés de chiffrement.
|
| 49 |
+
Si non, les génère et les sauvegarde.
|
| 50 |
+
|
| 51 |
+
Args:
|
| 52 |
+
user_profile: Instance de UserProfile
|
| 53 |
+
|
| 54 |
+
Returns:
|
| 55 |
+
bool: True si les clés existent ou ont été générées avec succès
|
| 56 |
+
"""
|
| 57 |
+
if user_profile.public_key and user_profile.encrypted_private_key:
|
| 58 |
+
return True
|
| 59 |
+
|
| 60 |
+
try:
|
| 61 |
+
public_key, private_key = generate_rsa_keypair()
|
| 62 |
+
|
| 63 |
+
if public_key and private_key:
|
| 64 |
+
user_profile.public_key = public_key
|
| 65 |
+
# Note: En production, la clé privée devrait être chiffrée
|
| 66 |
+
# avec le mot de passe de l'utilisateur ou un secret dérivé.
|
| 67 |
+
# Pour l'instant, on la stocke en clair (le frontend la chiffre lui-même).
|
| 68 |
+
user_profile.encrypted_private_key = private_key
|
| 69 |
+
user_profile.save(update_fields=['public_key', 'encrypted_private_key'])
|
| 70 |
+
|
| 71 |
+
logger.info(f"Clés générées pour le profil {user_profile.id}")
|
| 72 |
+
return True
|
| 73 |
+
|
| 74 |
+
return False
|
| 75 |
+
|
| 76 |
+
except Exception as e:
|
| 77 |
+
logger.error(f"Erreur lors de la génération des clés pour profil {user_profile.id}: {e}")
|
| 78 |
+
return False
|
backend/apps/users/management/__init__.py
CHANGED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Empty file to make this a Python package
|
backend/apps/users/management/commands/__init__.py
CHANGED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
# Empty file to make this a Python package
|
backend/apps/users/management/commands/fix_profile_flags.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================
|
| 2 |
+
# apps/users/management/commands/fix_profile_flags.py
|
| 3 |
+
# ============================================
|
| 4 |
+
from django.core.management.base import BaseCommand
|
| 5 |
+
from apps.users.models import UserProfile
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class Command(BaseCommand):
|
| 9 |
+
help = 'Corrige les flags is_current des profils utilisateurs'
|
| 10 |
+
|
| 11 |
+
def handle(self, *args, **options):
|
| 12 |
+
# Trouver tous les profils sans is_current=True
|
| 13 |
+
profiles_to_fix = UserProfile.objects.filter(is_current=False)
|
| 14 |
+
|
| 15 |
+
total = profiles_to_fix.count()
|
| 16 |
+
self.stdout.write(f"Profils à corriger: {total}")
|
| 17 |
+
|
| 18 |
+
if total == 0:
|
| 19 |
+
self.stdout.write(self.style.SUCCESS("Tous les profils sont déjà corrects!"))
|
| 20 |
+
return
|
| 21 |
+
|
| 22 |
+
# Mettre à jour tous les profils
|
| 23 |
+
updated = profiles_to_fix.update(is_current=True)
|
| 24 |
+
|
| 25 |
+
self.stdout.write(
|
| 26 |
+
self.style.SUCCESS(f"✓ {updated} profils mis à jour avec is_current=True")
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
# Vérifier que tous ont des clés
|
| 30 |
+
profiles_without_keys = UserProfile.objects.filter(
|
| 31 |
+
is_current=True
|
| 32 |
+
).filter(
|
| 33 |
+
public_key__isnull=True
|
| 34 |
+
) | UserProfile.objects.filter(
|
| 35 |
+
is_current=True,
|
| 36 |
+
public_key=''
|
| 37 |
+
)
|
| 38 |
+
|
| 39 |
+
if profiles_without_keys.exists():
|
| 40 |
+
self.stdout.write(
|
| 41 |
+
self.style.WARNING(
|
| 42 |
+
f"\n⚠ {profiles_without_keys.count()} profils n'ont pas de clés de chiffrement."
|
| 43 |
+
)
|
| 44 |
+
)
|
| 45 |
+
self.stdout.write(
|
| 46 |
+
"Exécutez: python manage.py generate_encryption_keys"
|
| 47 |
+
)
|
backend/apps/users/management/commands/generate_encryption_keys.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================
|
| 2 |
+
# apps/users/management/commands/generate_encryption_keys.py
|
| 3 |
+
# ============================================
|
| 4 |
+
from django.core.management.base import BaseCommand
|
| 5 |
+
from apps.users.models import UserProfile
|
| 6 |
+
from apps.users.encryption import ensure_user_has_keys
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class Command(BaseCommand):
|
| 10 |
+
help = 'Génère les clés de chiffrement pour tous les profils qui n\'en ont pas'
|
| 11 |
+
|
| 12 |
+
def add_arguments(self, parser):
|
| 13 |
+
parser.add_argument(
|
| 14 |
+
'--force',
|
| 15 |
+
action='store_true',
|
| 16 |
+
help='Régénère les clés même si elles existent déjà',
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
def handle(self, *args, **options):
|
| 20 |
+
force = options['force']
|
| 21 |
+
|
| 22 |
+
# Récupérer tous les profils courants
|
| 23 |
+
profiles = UserProfile.objects.filter(is_current=True)
|
| 24 |
+
|
| 25 |
+
if not force:
|
| 26 |
+
# Filtrer ceux qui n'ont pas de clés
|
| 27 |
+
profiles = profiles.filter(
|
| 28 |
+
public_key__isnull=True
|
| 29 |
+
) | profiles.filter(
|
| 30 |
+
public_key=''
|
| 31 |
+
) | profiles.filter(
|
| 32 |
+
encrypted_private_key__isnull=True
|
| 33 |
+
) | profiles.filter(
|
| 34 |
+
encrypted_private_key=''
|
| 35 |
+
)
|
| 36 |
+
|
| 37 |
+
total = profiles.count()
|
| 38 |
+
self.stdout.write(f"Profils à traiter: {total}")
|
| 39 |
+
|
| 40 |
+
success = 0
|
| 41 |
+
failed = 0
|
| 42 |
+
|
| 43 |
+
for profile in profiles:
|
| 44 |
+
if ensure_user_has_keys(profile):
|
| 45 |
+
success += 1
|
| 46 |
+
self.stdout.write(
|
| 47 |
+
self.style.SUCCESS(f"✓ Clés générées pour {profile.user.email}")
|
| 48 |
+
)
|
| 49 |
+
else:
|
| 50 |
+
failed += 1
|
| 51 |
+
self.stdout.write(
|
| 52 |
+
self.style.ERROR(f"✗ Échec pour {profile.user.email}")
|
| 53 |
+
)
|
| 54 |
+
|
| 55 |
+
self.stdout.write('')
|
| 56 |
+
self.stdout.write(self.style.SUCCESS(f"Terminé: {success} succès, {failed} échecs"))
|
backend/apps/users/serializers.py
CHANGED
|
@@ -93,17 +93,24 @@ class UserRegistrationSerializer(serializers.Serializer):
|
|
| 93 |
country = validated_data.pop('country', None)
|
| 94 |
university = validated_data.pop('university', None)
|
| 95 |
|
| 96 |
-
# Créer l'utilisateur
|
| 97 |
user = User.objects.create_user(**validated_data)
|
| 98 |
|
| 99 |
-
#
|
| 100 |
-
profile = UserProfile.objects.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 101 |
|
| 102 |
if country:
|
| 103 |
-
UserCountry.objects.
|
| 104 |
|
| 105 |
if university:
|
| 106 |
-
UserUniversity.objects.
|
| 107 |
|
| 108 |
return user
|
| 109 |
|
|
|
|
| 93 |
country = validated_data.pop('country', None)
|
| 94 |
university = validated_data.pop('university', None)
|
| 95 |
|
| 96 |
+
# Créer l'utilisateur (déclenche le signal post_save qui crée un profil de base)
|
| 97 |
user = User.objects.create_user(**validated_data)
|
| 98 |
|
| 99 |
+
# Récupérer le profil créé par le signal et le mettre à jour avec les vraies infos
|
| 100 |
+
profile = UserProfile.objects.filter(user=user).first()
|
| 101 |
+
if not profile:
|
| 102 |
+
# Au cas où le signal n'aurait pas fonctionné
|
| 103 |
+
profile = UserProfile.objects.create(user=user, name=name, is_current=True)
|
| 104 |
+
else:
|
| 105 |
+
profile.name = name
|
| 106 |
+
profile.is_current = True # S'assurer que le profil est actif
|
| 107 |
+
profile.save()
|
| 108 |
|
| 109 |
if country:
|
| 110 |
+
UserCountry.objects.update_or_create(profile=profile, defaults={'country': country, 'is_current': True})
|
| 111 |
|
| 112 |
if university:
|
| 113 |
+
UserUniversity.objects.update_or_create(profile=profile, defaults={'university': university, 'is_current': True})
|
| 114 |
|
| 115 |
return user
|
| 116 |
|
backend/apps/users/signals.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================
|
| 2 |
+
# apps/users/signals.py - Signaux pour les utilisateurs
|
| 3 |
+
# ============================================
|
| 4 |
+
from django.db.models.signals import post_save
|
| 5 |
+
from django.dispatch import receiver
|
| 6 |
+
import logging
|
| 7 |
+
|
| 8 |
+
logger = logging.getLogger(__name__)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@receiver(post_save, sender='users.UserProfile')
|
| 12 |
+
def generate_encryption_keys_on_profile_create(sender, instance, created, **kwargs):
|
| 13 |
+
"""
|
| 14 |
+
Génère automatiquement les clés de chiffrement quand un profil est créé
|
| 15 |
+
ou si le profil n'a pas de clés.
|
| 16 |
+
"""
|
| 17 |
+
# Vérifier si le profil n'a pas déjà de clés
|
| 18 |
+
if not instance.public_key or not instance.encrypted_private_key:
|
| 19 |
+
try:
|
| 20 |
+
from apps.users.encryption import generate_rsa_keypair
|
| 21 |
+
|
| 22 |
+
public_key, private_key = generate_rsa_keypair()
|
| 23 |
+
|
| 24 |
+
if public_key and private_key:
|
| 25 |
+
# Utiliser update() pour éviter de déclencher à nouveau le signal
|
| 26 |
+
from apps.users.models import UserProfile
|
| 27 |
+
UserProfile.objects.filter(pk=instance.pk).update(
|
| 28 |
+
public_key=public_key,
|
| 29 |
+
encrypted_private_key=private_key,
|
| 30 |
+
is_current=True # S'assurer que le profil est actif
|
| 31 |
+
)
|
| 32 |
+
logger.info(f"Clés de chiffrement générées pour le profil {instance.pk}")
|
| 33 |
+
except Exception as e:
|
| 34 |
+
logger.error(f"Erreur lors de la génération des clés: {e}")
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
@receiver(post_save, sender='users.User')
|
| 38 |
+
def ensure_user_profile_has_keys(sender, instance, created, **kwargs):
|
| 39 |
+
"""
|
| 40 |
+
S'assure que le profil courant de l'utilisateur a des clés de chiffrement.
|
| 41 |
+
"""
|
| 42 |
+
try:
|
| 43 |
+
profile = instance.profiles.filter(is_current=True).first()
|
| 44 |
+
|
| 45 |
+
if profile and (not profile.public_key or not profile.encrypted_private_key):
|
| 46 |
+
from apps.users.encryption import ensure_user_has_keys
|
| 47 |
+
ensure_user_has_keys(profile)
|
| 48 |
+
except Exception as e:
|
| 49 |
+
logger.error(f"Erreur lors de la vérification des clés utilisateur: {e}")
|
backend/celery.py
DELETED
|
@@ -1,12 +0,0 @@
|
|
| 1 |
-
import os
|
| 2 |
-
from celery import Celery
|
| 3 |
-
|
| 4 |
-
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'educonnect_api.settings')
|
| 5 |
-
|
| 6 |
-
app = Celery('educonnect_api')
|
| 7 |
-
app.config_from_object('django.conf:settings', namespace='CELERY')
|
| 8 |
-
app.autodiscover_tasks()
|
| 9 |
-
|
| 10 |
-
@app.task(bind=True)
|
| 11 |
-
def debug_task(self):
|
| 12 |
-
print(f'Request: {self.request!r}')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
backend/educonnect/__init__.py
CHANGED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================
|
| 2 |
+
# educonnect/__init__.py
|
| 3 |
+
# ============================================
|
| 4 |
+
# This will make sure the app is always imported when
|
| 5 |
+
# Django starts so that shared_task will use this app.
|
| 6 |
+
from .celery import app as celery_app
|
| 7 |
+
|
| 8 |
+
__all__ = ('celery_app',)
|
backend/educonnect/asgi.py
CHANGED
|
@@ -1,29 +1,25 @@
|
|
| 1 |
-
# ============================================
|
| 2 |
-
# educonnect_api/asgi.py
|
| 3 |
-
# ============================================
|
| 4 |
-
|
| 5 |
import os
|
|
|
|
| 6 |
from django.core.asgi import get_asgi_application
|
| 7 |
-
from channels.routing import ProtocolTypeRouter, URLRouter
|
| 8 |
-
from channels.auth import AuthMiddlewareStack
|
| 9 |
-
from apps.messaging.consumers import ChatConsumer
|
| 10 |
-
from django.urls import path
|
| 11 |
|
| 12 |
-
|
|
|
|
| 13 |
|
|
|
|
| 14 |
django_asgi_app = get_asgi_application()
|
| 15 |
|
|
|
|
| 16 |
from channels.routing import ProtocolTypeRouter, URLRouter
|
| 17 |
-
|
|
|
|
| 18 |
from apps.notifications.consumers import NotificationConsumer
|
|
|
|
| 19 |
|
| 20 |
websocket_urlpatterns = [
|
| 21 |
path('ws/chat/<str:conversation_id>/', ChatConsumer.as_asgi()),
|
| 22 |
path('ws/notifications/', NotificationConsumer.as_asgi()),
|
| 23 |
]
|
| 24 |
|
| 25 |
-
from educonnect.middleware import JwtAuthMiddleware
|
| 26 |
-
|
| 27 |
application = ProtocolTypeRouter({
|
| 28 |
"http": django_asgi_app,
|
| 29 |
"websocket": JwtAuthMiddleware(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import os
|
| 2 |
+
import django
|
| 3 |
from django.core.asgi import get_asgi_application
|
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
|
| 5 |
+
# Définir les paramètres Django avant tout import d'application
|
| 6 |
+
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'educonnect.settings')
|
| 7 |
|
| 8 |
+
# Initialiser Django
|
| 9 |
django_asgi_app = get_asgi_application()
|
| 10 |
|
| 11 |
+
# Maintenant on peut importer le reste
|
| 12 |
from channels.routing import ProtocolTypeRouter, URLRouter
|
| 13 |
+
from django.urls import path
|
| 14 |
+
from apps.messaging.consumers import ChatConsumer
|
| 15 |
from apps.notifications.consumers import NotificationConsumer
|
| 16 |
+
from educonnect.middleware import JwtAuthMiddleware
|
| 17 |
|
| 18 |
websocket_urlpatterns = [
|
| 19 |
path('ws/chat/<str:conversation_id>/', ChatConsumer.as_asgi()),
|
| 20 |
path('ws/notifications/', NotificationConsumer.as_asgi()),
|
| 21 |
]
|
| 22 |
|
|
|
|
|
|
|
| 23 |
application = ProtocolTypeRouter({
|
| 24 |
"http": django_asgi_app,
|
| 25 |
"websocket": JwtAuthMiddleware(
|
backend/educonnect/celery.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ============================================
|
| 2 |
+
# educonnect/celery.py - Configuration Celery
|
| 3 |
+
# ============================================
|
| 4 |
+
import os
|
| 5 |
+
from celery import Celery
|
| 6 |
+
|
| 7 |
+
# set the default Django settings module for the 'celery' program.
|
| 8 |
+
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'educonnect.settings')
|
| 9 |
+
|
| 10 |
+
app = Celery('educonnect')
|
| 11 |
+
|
| 12 |
+
# Using a string here means the worker doesn't have to serialize
|
| 13 |
+
# the configuration object to child processes.
|
| 14 |
+
app.config_from_object('django.conf:settings', namespace='CELERY')
|
| 15 |
+
|
| 16 |
+
# Load task modules from all registered Django app configs.
|
| 17 |
+
app.autodiscover_tasks()
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@app.task(bind=True, ignore_result=True)
|
| 21 |
+
def debug_task(self):
|
| 22 |
+
print(f'Request: {self.request!r}')
|
backend/educonnect/settings.py
CHANGED
|
@@ -30,6 +30,7 @@ INSTALLED_APPS = [
|
|
| 30 |
'django_filters',
|
| 31 |
'drf_spectacular',
|
| 32 |
'channels',
|
|
|
|
| 33 |
|
| 34 |
# Local apps
|
| 35 |
'apps.users',
|
|
@@ -79,31 +80,31 @@ WSGI_APPLICATION = 'educonnect.wsgi.application'
|
|
| 79 |
ASGI_APPLICATION = 'educonnect.asgi.application'
|
| 80 |
|
| 81 |
# Database
|
| 82 |
-
|
| 83 |
-
|
| 84 |
-
'
|
| 85 |
-
|
| 86 |
-
|
| 87 |
-
'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 88 |
}
|
| 89 |
}
|
| 90 |
-
}
|
| 91 |
-
|
| 92 |
-
# Uncomment below for PostgreSQL
|
| 93 |
-
# DATABASES = {
|
| 94 |
-
# 'default': {
|
| 95 |
-
# 'ENGINE': 'django.db.backends.postgresql',
|
| 96 |
-
# 'NAME': config('DB_NAME', default='educonnect_db'),
|
| 97 |
-
# 'USER': config('DB_USER', default='postgres'),
|
| 98 |
-
# 'PASSWORD': config('DB_PASSWORD', default='postgres'),
|
| 99 |
-
# 'HOST': config('DB_HOST', default='localhost'),
|
| 100 |
-
# 'PORT': config('DB_PORT', default='5432'),
|
| 101 |
-
# 'CONN_MAX_AGE': 600,
|
| 102 |
-
# 'OPTIONS': {
|
| 103 |
-
# 'connect_timeout': 10,
|
| 104 |
-
# }
|
| 105 |
-
# }
|
| 106 |
-
# }
|
| 107 |
|
| 108 |
# Custom User Model
|
| 109 |
AUTH_USER_MODEL = 'users.User'
|
|
@@ -297,3 +298,27 @@ BADGE_THRESHOLDS = {
|
|
| 297 |
'MASTER': {'points': 2500},
|
| 298 |
'LEGEND': {'points': 5000},
|
| 299 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 30 |
'django_filters',
|
| 31 |
'drf_spectacular',
|
| 32 |
'channels',
|
| 33 |
+
'django_celery_beat',
|
| 34 |
|
| 35 |
# Local apps
|
| 36 |
'apps.users',
|
|
|
|
| 80 |
ASGI_APPLICATION = 'educonnect.asgi.application'
|
| 81 |
|
| 82 |
# Database
|
| 83 |
+
if config('DB_HOST', default=None):
|
| 84 |
+
DATABASES = {
|
| 85 |
+
'default': {
|
| 86 |
+
'ENGINE': 'django.db.backends.postgresql',
|
| 87 |
+
'NAME': config('DB_NAME', default='educonnect_db'),
|
| 88 |
+
'USER': config('DB_USER', default='postgres'),
|
| 89 |
+
'PASSWORD': config('DB_PASSWORD', default='postgres'),
|
| 90 |
+
'HOST': config('DB_HOST'),
|
| 91 |
+
'PORT': config('DB_PORT', default='5432'),
|
| 92 |
+
'CONN_MAX_AGE': 600,
|
| 93 |
+
'OPTIONS': {
|
| 94 |
+
'connect_timeout': 10,
|
| 95 |
+
}
|
| 96 |
+
}
|
| 97 |
+
}
|
| 98 |
+
else:
|
| 99 |
+
DATABASES = {
|
| 100 |
+
'default': {
|
| 101 |
+
'ENGINE': 'django.db.backends.sqlite3',
|
| 102 |
+
'NAME': BASE_DIR / 'db.sqlite3',
|
| 103 |
+
'OPTIONS': {
|
| 104 |
+
'timeout': 20,
|
| 105 |
+
}
|
| 106 |
}
|
| 107 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 108 |
|
| 109 |
# Custom User Model
|
| 110 |
AUTH_USER_MODEL = 'users.User'
|
|
|
|
| 298 |
'MASTER': {'points': 2500},
|
| 299 |
'LEGEND': {'points': 5000},
|
| 300 |
}
|
| 301 |
+
|
| 302 |
+
# Celery Configuration
|
| 303 |
+
CELERY_BROKER_URL = config('CELERY_BROKER_URL', default='redis://localhost:6379/0')
|
| 304 |
+
CELERY_RESULT_BACKEND = config('CELERY_RESULT_BACKEND', default='redis://localhost:6379/0')
|
| 305 |
+
CELERY_ACCEPT_CONTENT = ['json']
|
| 306 |
+
CELERY_TASK_SERIALIZER = 'json'
|
| 307 |
+
CELERY_RESULT_SERIALIZER = 'json'
|
| 308 |
+
CELERY_TIMEZONE = TIME_ZONE
|
| 309 |
+
CELERY_TASK_TRACK_STARTED = True
|
| 310 |
+
CELERY_TASK_TIME_LIMIT = 30 * 60 # 30 minutes max
|
| 311 |
+
|
| 312 |
+
# Celery Beat - Tâches périodiques
|
| 313 |
+
CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
|
| 314 |
+
CELERY_BEAT_SCHEDULE = {
|
| 315 |
+
'check-pending-messages-every-5-minutes': {
|
| 316 |
+
'task': 'messaging.check_pending_messages',
|
| 317 |
+
'schedule': 300.0, # 5 minutes
|
| 318 |
+
},
|
| 319 |
+
'schedule-unlock-upcoming-bookings': {
|
| 320 |
+
'task': 'messaging.schedule_unlock_for_upcoming_bookings',
|
| 321 |
+
'schedule': 600.0, # 10 minutes
|
| 322 |
+
},
|
| 323 |
+
}
|
| 324 |
+
|
backend/list_models.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from google import genai
|
| 2 |
+
import os
|
| 3 |
+
from decouple import config
|
| 4 |
+
|
| 5 |
+
def list_models():
|
| 6 |
+
api_key = config('GEMINI_API_KEY', default='')
|
| 7 |
+
if not api_key:
|
| 8 |
+
print("GEMINI_API_KEY not found")
|
| 9 |
+
return
|
| 10 |
+
|
| 11 |
+
client = genai.Client(api_key=api_key)
|
| 12 |
+
try:
|
| 13 |
+
print("Listing models...")
|
| 14 |
+
for model in client.models.list():
|
| 15 |
+
if 'generateContent' in model.supported_actions:
|
| 16 |
+
print(f"Model: {model.name}")
|
| 17 |
+
except Exception as e:
|
| 18 |
+
print(f"Error: {e}")
|
| 19 |
+
|
| 20 |
+
if __name__ == "__main__":
|
| 21 |
+
list_models()
|
backend/requirements.txt
CHANGED
|
@@ -1,6 +1,6 @@
|
|
| 1 |
amqp==5.3.1
|
| 2 |
annotated-types==0.7.0
|
| 3 |
-
anyio
|
| 4 |
asgiref==3.11.0
|
| 5 |
attrs==25.4.0
|
| 6 |
autobahn==25.11.1
|
|
@@ -32,9 +32,11 @@ django-allauth==0.57.0
|
|
| 32 |
django-cors-headers==4.3.0
|
| 33 |
django-filter==23.3
|
| 34 |
django-ses==3.5.0
|
|
|
|
|
|
|
| 35 |
django-storages==1.14.2
|
| 36 |
djangorestframework==3.14.0
|
| 37 |
-
djangorestframework-simplejwt==5.
|
| 38 |
drf-spectacular==0.26.5
|
| 39 |
factory-boy==3.3.0
|
| 40 |
Faker==20.1.0
|
|
@@ -42,7 +44,7 @@ flake8==6.1.0
|
|
| 42 |
google-ai-generativelanguage==0.4.0
|
| 43 |
google-api-core==2.28.1
|
| 44 |
google-auth==2.43.0
|
| 45 |
-
google-genai
|
| 46 |
google-generativeai==0.3.1
|
| 47 |
googleapis-common-protos==1.72.0
|
| 48 |
grpcio==1.76.0
|
|
@@ -66,7 +68,7 @@ mccabe==0.7.0
|
|
| 66 |
msgpack==1.1.2
|
| 67 |
mypy_extensions==1.1.0
|
| 68 |
oauthlib==3.3.1
|
| 69 |
-
openai
|
| 70 |
packaging==25.0
|
| 71 |
pathspec==0.12.1
|
| 72 |
Pillow==10.1.0
|
|
@@ -122,3 +124,4 @@ wcwidth==0.2.14
|
|
| 122 |
websockets==15.0.1
|
| 123 |
whitenoise==6.6.0
|
| 124 |
zope.interface==8.1.1
|
|
|
|
|
|
| 1 |
amqp==5.3.1
|
| 2 |
annotated-types==0.7.0
|
| 3 |
+
anyio>=3.5.0
|
| 4 |
asgiref==3.11.0
|
| 5 |
attrs==25.4.0
|
| 6 |
autobahn==25.11.1
|
|
|
|
| 32 |
django-cors-headers==4.3.0
|
| 33 |
django-filter==23.3
|
| 34 |
django-ses==3.5.0
|
| 35 |
+
django-celery-beat==2.5.0
|
| 36 |
+
django-celery-results==2.5.1
|
| 37 |
django-storages==1.14.2
|
| 38 |
djangorestframework==3.14.0
|
| 39 |
+
djangorestframework-simplejwt==5.5.1
|
| 40 |
drf-spectacular==0.26.5
|
| 41 |
factory-boy==3.3.0
|
| 42 |
Faker==20.1.0
|
|
|
|
| 44 |
google-ai-generativelanguage==0.4.0
|
| 45 |
google-api-core==2.28.1
|
| 46 |
google-auth==2.43.0
|
| 47 |
+
google-genai>=0.1.0
|
| 48 |
google-generativeai==0.3.1
|
| 49 |
googleapis-common-protos==1.72.0
|
| 50 |
grpcio==1.76.0
|
|
|
|
| 68 |
msgpack==1.1.2
|
| 69 |
mypy_extensions==1.1.0
|
| 70 |
oauthlib==3.3.1
|
| 71 |
+
openai>=1.3.7
|
| 72 |
packaging==25.0
|
| 73 |
pathspec==0.12.1
|
| 74 |
Pillow==10.1.0
|
|
|
|
| 124 |
websockets==15.0.1
|
| 125 |
whitenoise==6.6.0
|
| 126 |
zope.interface==8.1.1
|
| 127 |
+
pypdf==5.1.0
|
docker-compose.yml
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
services:
|
| 3 |
+
db:
|
| 4 |
+
image: postgres:15
|
| 5 |
+
volumes:
|
| 6 |
+
- postgres_data:/var/lib/postgresql/data
|
| 7 |
+
environment:
|
| 8 |
+
POSTGRES_DB: educonnect_db
|
| 9 |
+
POSTGRES_USER: postgres
|
| 10 |
+
POSTGRES_PASSWORD: postgres
|
| 11 |
+
ports:
|
| 12 |
+
- "5433:5432"
|
| 13 |
+
|
| 14 |
+
redis:
|
| 15 |
+
image: redis:7-alpine
|
| 16 |
+
ports:
|
| 17 |
+
- "6380:6379"
|
| 18 |
+
|
| 19 |
+
backend:
|
| 20 |
+
build: ./backend
|
| 21 |
+
command: python manage.py runserver 0.0.0.0:8000
|
| 22 |
+
volumes:
|
| 23 |
+
- ./backend:/app
|
| 24 |
+
- media_volume:/app/media
|
| 25 |
+
- static_volume:/app/staticfiles
|
| 26 |
+
ports:
|
| 27 |
+
- "8000:8000"
|
| 28 |
+
env_file:
|
| 29 |
+
- ./backend/.env
|
| 30 |
+
environment:
|
| 31 |
+
- DEBUG=True
|
| 32 |
+
- DB_HOST=db
|
| 33 |
+
- DB_PORT=5432
|
| 34 |
+
- DB_NAME=educonnect_db
|
| 35 |
+
- DB_USER=postgres
|
| 36 |
+
- DB_PASSWORD=postgres
|
| 37 |
+
- REDIS_HOST=redis
|
| 38 |
+
- CELERY_BROKER_URL=redis://redis:6379/0
|
| 39 |
+
- CELERY_RESULT_BACKEND=redis://redis:6379/0
|
| 40 |
+
depends_on:
|
| 41 |
+
- db
|
| 42 |
+
- redis
|
| 43 |
+
|
| 44 |
+
celery:
|
| 45 |
+
build: ./backend
|
| 46 |
+
command: celery -A educonnect worker -l info
|
| 47 |
+
volumes:
|
| 48 |
+
- ./backend:/app
|
| 49 |
+
env_file:
|
| 50 |
+
- ./backend/.env
|
| 51 |
+
environment:
|
| 52 |
+
- DEBUG=True
|
| 53 |
+
- DB_HOST=db
|
| 54 |
+
- DB_PORT=5432
|
| 55 |
+
- DB_NAME=educonnect_db
|
| 56 |
+
- DB_USER=postgres
|
| 57 |
+
- DB_PASSWORD=postgres
|
| 58 |
+
- REDIS_HOST=redis
|
| 59 |
+
- CELERY_BROKER_URL=redis://redis:6379/0
|
| 60 |
+
- CELERY_RESULT_BACKEND=redis://redis:6379/0
|
| 61 |
+
depends_on:
|
| 62 |
+
- db
|
| 63 |
+
- redis
|
| 64 |
+
|
| 65 |
+
channels:
|
| 66 |
+
build: ./backend
|
| 67 |
+
command: daphne -b 0.0.0.0 -p 8001 educonnect.asgi:application
|
| 68 |
+
volumes:
|
| 69 |
+
- ./backend:/app
|
| 70 |
+
ports:
|
| 71 |
+
- "8001:8001"
|
| 72 |
+
env_file:
|
| 73 |
+
- ./backend/.env
|
| 74 |
+
environment:
|
| 75 |
+
- DEBUG=True
|
| 76 |
+
- DB_HOST=db
|
| 77 |
+
- DB_PORT=5432
|
| 78 |
+
- DB_NAME=educonnect_db
|
| 79 |
+
- DB_USER=postgres
|
| 80 |
+
- DB_PASSWORD=postgres
|
| 81 |
+
- REDIS_HOST=redis
|
| 82 |
+
depends_on:
|
| 83 |
+
- db
|
| 84 |
+
- redis
|
| 85 |
+
|
| 86 |
+
frontend:
|
| 87 |
+
build: ./frontend
|
| 88 |
+
volumes:
|
| 89 |
+
- ./frontend:/app
|
| 90 |
+
- /app/node_modules
|
| 91 |
+
ports:
|
| 92 |
+
- "5173:3000"
|
| 93 |
+
environment:
|
| 94 |
+
- VITE_API_URL=http://localhost:8000/api
|
| 95 |
+
depends_on:
|
| 96 |
+
- backend
|
| 97 |
+
|
| 98 |
+
volumes:
|
| 99 |
+
postgres_data:
|
| 100 |
+
media_volume:
|
| 101 |
+
static_volume:
|
ANALYTICS_SYSTEM.md → documentation/ANALYTICS_SYSTEM.md
RENAMED
|
File without changes
|
AVATAR_ERROR_FIX.md → documentation/AVATAR_ERROR_FIX.md
RENAMED
|
File without changes
|
AVATAR_UPLOAD_COMPLETE_FIX.md → documentation/AVATAR_UPLOAD_COMPLETE_FIX.md
RENAMED
|
File without changes
|
AVATAR_UPLOAD_FIX.md → documentation/AVATAR_UPLOAD_FIX.md
RENAMED
|
File without changes
|
BADGES_INTEGRATION.md → documentation/BADGES_INTEGRATION.md
RENAMED
|
File without changes
|
CHAT_BUG_FIX.md → documentation/CHAT_BUG_FIX.md
RENAMED
|
File without changes
|
DATABASE_LOCKED_SOLUTION.md → documentation/DATABASE_LOCKED_SOLUTION.md
RENAMED
|
File without changes
|
DEBOUNCE_EXPLAINED.md → documentation/DEBOUNCE_EXPLAINED.md
RENAMED
|
File without changes
|
DEBUGGING_AVAILABILITIES.md → documentation/DEBUGGING_AVAILABILITIES.md
RENAMED
|
File without changes
|
DEBUG_ERREURS.md → documentation/DEBUG_ERREURS.md
RENAMED
|
File without changes
|
documentation/ENCRYPTION.md
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Système de Chiffrement End-to-End - EduConnect
|
| 2 |
+
|
| 3 |
+
## Vue d'ensemble
|
| 4 |
+
|
| 5 |
+
Le système de chiffrement garantit que tous les messages entre utilisateurs sont chiffrés de bout en bout (E2E) en utilisant RSA-2048 et AES-256.
|
| 6 |
+
|
| 7 |
+
## Architecture
|
| 8 |
+
|
| 9 |
+
### Backend (Django)
|
| 10 |
+
|
| 11 |
+
1. **Génération automatique des clés** (`apps/users/encryption.py`)
|
| 12 |
+
- Génère une paire de clés RSA 2048 bits pour chaque utilisateur
|
| 13 |
+
- Clé publique : stockée en clair (partagée avec les autres)
|
| 14 |
+
- Clé privée : stockée (à chiffrer avec le mot de passe utilisateur en production)
|
| 15 |
+
|
| 16 |
+
2. **Signaux automatiques** (`apps/users/signals.py`)
|
| 17 |
+
- `generate_encryption_keys_on_profile_create` : Génère les clés à la création du profil
|
| 18 |
+
- `ensure_user_profile_has_keys` : Vérifie que chaque utilisateur a des clés
|
| 19 |
+
- Définit automatiquement `is_current=True` sur les profils
|
| 20 |
+
|
| 21 |
+
3. **Serializers** (`apps/users/serializers.py`)
|
| 22 |
+
- `UserProfileDetailSerializer` : Inclut `public_key` et `encrypted_private_key`
|
| 23 |
+
- `UserRegistrationSerializer` : S'assure que `is_current=True` lors de l'inscription
|
| 24 |
+
|
| 25 |
+
### Frontend (React/TypeScript)
|
| 26 |
+
|
| 27 |
+
1. **Service de chiffrement** (`services/encryption.ts`)
|
| 28 |
+
- Récupère les clés depuis le backend en priorité
|
| 29 |
+
- Génère localement uniquement en fallback
|
| 30 |
+
- Synchronise automatiquement avec le backend
|
| 31 |
+
|
| 32 |
+
2. **Initialisation automatique** (`context/AuthContext.tsx`)
|
| 33 |
+
- Les clés sont initialisées dès la connexion
|
| 34 |
+
- Pas besoin d'attendre l'ouverture du chat
|
| 35 |
+
|
| 36 |
+
3. **Chiffrement des messages** (`pages/Chat.tsx`)
|
| 37 |
+
- Vérifie que tous les participants ont des clés publiques
|
| 38 |
+
- Génère une clé AES unique par message
|
| 39 |
+
- Chiffre le contenu avec AES
|
| 40 |
+
- Chiffre la clé AES avec la clé publique RSA de chaque participant
|
| 41 |
+
|
| 42 |
+
## Commandes de maintenance
|
| 43 |
+
|
| 44 |
+
### Générer les clés pour tous les utilisateurs
|
| 45 |
+
```bash
|
| 46 |
+
python manage.py generate_encryption_keys
|
| 47 |
+
```
|
| 48 |
+
|
| 49 |
+
Options :
|
| 50 |
+
- `--force` : Régénère les clés même si elles existent
|
| 51 |
+
|
| 52 |
+
### Corriger les flags is_current
|
| 53 |
+
```bash
|
| 54 |
+
python manage.py fix_profile_flags
|
| 55 |
+
```
|
| 56 |
+
|
| 57 |
+
## Vérifications
|
| 58 |
+
|
| 59 |
+
### Vérifier que tous les profils ont des clés
|
| 60 |
+
```bash
|
| 61 |
+
python manage.py shell -c "
|
| 62 |
+
from apps.users.models import UserProfile
|
| 63 |
+
profiles = UserProfile.objects.filter(is_current=True)
|
| 64 |
+
print(f'Total: {profiles.count()}')
|
| 65 |
+
for p in profiles:
|
| 66 |
+
has_keys = bool(p.public_key and p.encrypted_private_key)
|
| 67 |
+
print(f'{p.user.email}: {\"✓\" if has_keys else \"✗\"} Clés')
|
| 68 |
+
"
|
| 69 |
+
```
|
| 70 |
+
|
| 71 |
+
### Vérifier is_current
|
| 72 |
+
```bash
|
| 73 |
+
python manage.py shell -c "
|
| 74 |
+
from apps.users.models import UserProfile
|
| 75 |
+
total = UserProfile.objects.count()
|
| 76 |
+
current = UserProfile.objects.filter(is_current=True).count()
|
| 77 |
+
print(f'Profils actifs: {current}/{total}')
|
| 78 |
+
"
|
| 79 |
+
```
|
| 80 |
+
|
| 81 |
+
## Flux de chiffrement
|
| 82 |
+
|
| 83 |
+
1. **Envoi d'un message**
|
| 84 |
+
```
|
| 85 |
+
User A → Génère clé AES → Chiffre message avec AES
|
| 86 |
+
→ Chiffre clé AES avec public_key de User B
|
| 87 |
+
→ Envoie {contenu_chiffré, clés_chiffrées}
|
| 88 |
+
```
|
| 89 |
+
|
| 90 |
+
2. **Réception d'un message**
|
| 91 |
+
```
|
| 92 |
+
User B → Reçoit message chiffré
|
| 93 |
+
→ Déchiffre la clé AES avec sa private_key
|
| 94 |
+
→ Déchiffre le contenu avec la clé AES
|
| 95 |
+
→ Affiche le message
|
| 96 |
+
```
|
| 97 |
+
|
| 98 |
+
## Sécurité
|
| 99 |
+
|
| 100 |
+
### Points forts
|
| 101 |
+
✅ Chiffrement RSA-2048 (clés asymétriques)
|
| 102 |
+
✅ Chiffrement AES-256-CBC (contenu des messages)
|
| 103 |
+
✅ Clé AES unique par message
|
| 104 |
+
✅ Génération automatique des clés
|
| 105 |
+
✅ Clés stockées côté serveur (backup — noter que ce choix limite le chiffrement « de bout en bout » tant que la clé privée n'est pas chiffrée côté client ; voir « Améliorations futures »)
|
| 106 |
+
|
| 107 |
+
### Améliorations futures
|
| 108 |
+
⚠️ Chiffrer la clé privée avec le mot de passe utilisateur
|
| 109 |
+
⚠️ Implémenter la rotation des clés
|
| 110 |
+
⚠️ Ajouter un système de récupération de clés
|
| 111 |
+
⚠️ Mettre en place la Perfect Forward Secrecy (PFS)
|
| 112 |
+
|
| 113 |
+
## Dépannage
|
| 114 |
+
|
| 115 |
+
### "Message non chiffré (Destinataire sans clés)"
|
| 116 |
+
|
| 117 |
+
**Causes possibles :**
|
| 118 |
+
1. Le profil n'a pas `is_current=True`
|
| 119 |
+
- Solution : `python manage.py fix_profile_flags`
|
| 120 |
+
|
| 121 |
+
2. Les clés n'ont pas été générées
|
| 122 |
+
- Solution : `python manage.py generate_encryption_keys`
|
| 123 |
+
|
| 124 |
+
3. Les clés ne sont pas renvoyées par l'API
|
| 125 |
+
- Vérifier que `UserProfileDetailSerializer` inclut les clés
|
| 126 |
+
- Vérifier le mapping dans `services/messaging.ts`
|
| 127 |
+
|
| 128 |
+
### Logs de débogage
|
| 129 |
+
|
| 130 |
+
Dans la console du navigateur, lors de l'envoi d'un message :
|
| 131 |
+
```
|
| 132 |
+
🔐 Vérification du chiffrement:
|
| 133 |
+
- Mes clés: Présentes
|
| 134 |
+
- Autres participants: 1
|
| 135 |
+
- Participant 1: [Nom]
|
| 136 |
+
- public_key: Présente ✓
|
| 137 |
+
- Chiffrement possible: OUI ✓
|
| 138 |
+
```
|
| 139 |
+
|
| 140 |
+
Si "NON ✗" apparaît, vérifier que le backend renvoie bien les clés.
|
| 141 |
+
|
| 142 |
+
## Développé par
|
| 143 |
+
Marino ATOHOUN pour Hypee - EduConnect Africa
|
ENCRYPTION_FEATURE.md → documentation/ENCRYPTION_FEATURE.md
RENAMED
|
File without changes
|
documentation/EXTERNAL_TOOLS.md
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Outils Externes - EduConnect
|
| 2 |
+
|
| 3 |
+
## Laboratoire Chimique Virtuel
|
| 4 |
+
|
| 5 |
+
Le Laboratoire Chimique est déployé séparément sur Vercel et accessible via un lien externe.
|
| 6 |
+
|
| 7 |
+
### Accès
|
| 8 |
+
- **URL** : https://virtual-labo-chimique.vercel.app/
|
| 9 |
+
- **Depuis EduConnect** : Outils → Laboratoire Chimique → S'ouvre dans un nouvel onglet
|
| 10 |
+
|
| 11 |
+
### Configuration
|
| 12 |
+
|
| 13 |
+
Le lien est configuré dans `frontend/pages/LearningTools.tsx` :
|
| 14 |
+
|
| 15 |
+
```typescript
|
| 16 |
+
else if (toolId === 'chem') {
|
| 17 |
+
window.open('https://virtual-labo-chimique.vercel.app/', '_blank');
|
| 18 |
+
}
|
| 19 |
+
```
|
| 20 |
+
|
| 21 |
+
### Modification du lien
|
| 22 |
+
|
| 23 |
+
Pour changer l'URL du laboratoire :
|
| 24 |
+
|
| 25 |
+
1. Ouvrir `frontend/pages/LearningTools.tsx`
|
| 26 |
+
2. Chercher `toolId === 'chem'`
|
| 27 |
+
3. Modifier l'URL dans `window.open()`
|
| 28 |
+
|
| 29 |
+
### Base de données
|
| 30 |
+
|
| 31 |
+
L'outil est enregistré dans la table `learning_tools` avec :
|
| 32 |
+
- `tool_id` : `'chem'`
|
| 33 |
+
- `title` : `'Laboratoire Chimique'`
|
| 34 |
+
- `status` : `'available'`
|
| 35 |
+
|
| 36 |
+
## Autres outils externes
|
| 37 |
+
|
| 38 |
+
Pour ajouter d'autres outils externes, suivre le même pattern :
|
| 39 |
+
|
| 40 |
+
```typescript
|
| 41 |
+
else if (toolId === 'mon_outil') {
|
| 42 |
+
window.open('https://mon-outil.com/', '_blank');
|
| 43 |
+
}
|
| 44 |
+
```
|
| 45 |
+
|
| 46 |
+
---
|
| 47 |
+
Développé par Marino ATOHOUN pour Hypee - EduConnect Africa
|
FORMULES_GUIDE.md → documentation/FORMULES_GUIDE.md
RENAMED
|
File without changes
|
GEMINI_SETUP.md → documentation/GEMINI_SETUP.md
RENAMED
|
File without changes
|