Spaces:
Sleeping
Sleeping
Antigravity committed on
Commit ·
2d802f0
0
Parent(s):
Stable deployment version: Lazy loading and Docker optimized
Browse files- .gitattributes +3 -0
- .gitignore +10 -0
- Dockerfile +31 -0
- Procfile +1 -0
- README.md +30 -0
- app.py +449 -0
- models/gender_model.pth +3 -0
- models_loader.py +159 -0
- reproduction_apriori.py +59 -0
- reproduction_apriori_v2.py +54 -0
- requirements.txt +20 -0
- scripts/setup_env.ps1 +14 -0
- static/audio/question.wav +0 -0
- static/audio/recording.wav +0 -0
- static/css/main.css +184 -0
- static/css/style.css +461 -0
- static/style.css +192 -0
- templates/apriori.html +209 -0
- templates/clustering.html +185 -0
- templates/dbscan.html +168 -0
- templates/gender.html +175 -0
- templates/generate.html +69 -0
- templates/image.html +95 -0
- templates/index.html +147 -0
- templates/layout.html +111 -0
- templates/qa.html +262 -0
- templates/sentiment.html +210 -0
- templates/textgen.html +97 -0
- templates/translate.html +112 -0
- templates/zsl.html +177 -0
- train_cnn.py +123 -0
- verify_extended.py +58 -0
- verify_models.py +68 -0
.gitattributes
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
templates/**/*.html linguist-vendored
|
| 2 |
+
static/**/* linguist-vendored
|
| 3 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
.gitignore
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.venv/
|
| 2 |
+
__pycache__/
|
| 3 |
+
*.pyc
|
| 4 |
+
static/uploads/*
|
| 5 |
+
!static/uploads/.gitkeep
|
| 6 |
+
# models/*.pth
|
| 7 |
+
.vscode/
|
| 8 |
+
pip_list.txt
|
| 9 |
+
*.mp3
|
| 10 |
+
dataset/
|
Dockerfile
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Use an official Python runtime as a parent image
|
| 2 |
+
FROM python:3.10-slim
|
| 3 |
+
|
| 4 |
+
# Set the working directory in the container
|
| 5 |
+
WORKDIR /app
|
| 6 |
+
|
| 7 |
+
# Install system dependencies
|
| 8 |
+
RUN apt-get update && apt-get install -y \
|
| 9 |
+
build-essential \
|
| 10 |
+
libasound2-dev \
|
| 11 |
+
portaudio19-dev \
|
| 12 |
+
libportaudio2 \
|
| 13 |
+
libportaudiocpp0 \
|
| 14 |
+
ffmpeg \
|
| 15 |
+
&& rm -rf /var/lib/apt/lists/*
|
| 16 |
+
|
| 17 |
+
# Copy the requirements file into the container
|
| 18 |
+
COPY requirements.txt .
|
| 19 |
+
|
| 20 |
+
# Install dependencies
|
| 21 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
| 22 |
+
|
| 23 |
+
# Copy the rest of the application code
|
| 24 |
+
COPY . .
|
| 25 |
+
|
| 26 |
+
# Expose the port the app runs on (Hugging Face uses 7860)
|
| 27 |
+
EXPOSE 7860
|
| 28 |
+
|
| 29 |
+
# Command to run the application with increased timeout for model loading
|
| 30 |
+
# We use 0.0.0.0 to make it accessible outside the container
|
| 31 |
+
CMD ["gunicorn", "--bind", "0.0.0.0:7860", "--timeout", "300", "--workers", "1", "app:app"]
|
Procfile
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
web: gunicorn app:app
|
README.md
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
title: AML Project - AI Services Hub
|
| 3 |
+
emoji: 🤖
|
| 4 |
+
colorFrom: blue
|
| 5 |
+
colorTo: purple
|
| 6 |
+
sdk: docker
|
| 7 |
+
app_port: 7860
|
| 8 |
+
pinned: false
|
| 9 |
+
---
|
| 10 |
+
|
| 11 |
+
# AI Services Hub
|
| 12 |
+
|
| 13 |
+
A comprehensive Flask-based platform integrating diverse AI models for Image Classification, NLP (Translation, Text Gen, Sentiment), Audio Intelligence (Voice STT/TTS), and Data Mining (Clustering & Association Rules).
|
| 14 |
+
|
| 15 |
+
## Features
|
| 16 |
+
|
| 17 |
+
- **Gender Classification**: Custom PyTorch CNN model
|
| 18 |
+
- **Text Generation**: GPT-2 powered text completion
|
| 19 |
+
- **Translation**: English to Urdu translation
|
| 20 |
+
- **Sentiment Analysis**: Voice-enabled sentiment detection
|
| 21 |
+
- **Question Answering**: Voice-to-voice QA system
|
| 22 |
+
- **Zero-Shot Classification**: Custom label classification
|
| 23 |
+
- **Clustering**: K-Means and DBSCAN algorithms
|
| 24 |
+
- **Association Rules**: A-priori market basket analysis
|
| 25 |
+
|
| 26 |
+
## Tech Stack
|
| 27 |
+
|
| 28 |
+
- Flask, PyTorch, Hugging Face Transformers
|
| 29 |
+
- Scikit-learn, Pandas, Matplotlib
|
| 30 |
+
- OpenAI Whisper, gTTS, Librosa
|
app.py
ADDED
|
@@ -0,0 +1,449 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from flask import Flask, render_template, request
|
| 2 |
+
from werkzeug.utils import secure_filename
|
| 3 |
+
import os
|
| 4 |
+
import numpy as np
|
| 5 |
+
from PIL import Image
|
| 6 |
+
from transformers import pipeline
|
| 7 |
+
from gtts import gTTS
|
| 8 |
+
import speech_recognition as sr
|
| 9 |
+
import librosa
|
| 10 |
+
|
| 11 |
+
# Try importing tensorflow, handle if missing
|
| 12 |
+
try:
|
| 13 |
+
from tensorflow.keras.models import load_model
|
| 14 |
+
except ImportError:
|
| 15 |
+
load_model = None
|
| 16 |
+
|
| 17 |
+
app = Flask(__name__)
|
| 18 |
+
app.config['UPLOAD_FOLDER'] = 'static/uploads'
|
| 19 |
+
os.makedirs(app.config['UPLOAD_FOLDER'], exist_ok=True)
|
| 20 |
+
|
| 21 |
+
# Dataset paths (Colab format - update these for local use)
|
| 22 |
+
TRAIN_DIR = "/content/drive/MyDrive/AML-F24/Code/image_datset/image_datset/train"
|
| 23 |
+
TEST_DIR = "/content/drive/MyDrive/AML-F24/Code/image_datset/image_datset/test"
|
| 24 |
+
|
| 25 |
+
# ---------------- MODELS ---------------- #
|
| 26 |
+
from models_loader import loader
|
| 27 |
+
|
| 28 |
+
sentiment_model = loader.sentiment_pipeline
|
| 29 |
+
qa_model = loader.qa_pipeline
|
| 30 |
+
textgen_model = loader.text_gen_pipeline
|
| 31 |
+
translator = loader.translator_pipeline
|
| 32 |
+
stt_model = loader.stt_pipeline
|
| 33 |
+
zsl_model = loader.zsl_pipeline
|
| 34 |
+
gender_classifier = loader.gender_classifier
|
| 35 |
+
gender_model = loader.cnn_model # Custom CNN
|
| 36 |
+
|
| 37 |
+
# Clustering Dependencies
|
| 38 |
+
import pandas as pd
|
| 39 |
+
from sklearn.cluster import KMeans, DBSCAN
|
| 40 |
+
import matplotlib
|
| 41 |
+
matplotlib.use('Agg') # Non-interactive backend
|
| 42 |
+
import matplotlib.pyplot as plt
|
| 43 |
+
import io
|
| 44 |
+
import base64
|
| 45 |
+
from sklearn.preprocessing import StandardScaler
|
| 46 |
+
|
| 47 |
+
# Association Rules Dependencies
|
| 48 |
+
from mlxtend.frequent_patterns import apriori, association_rules
|
| 49 |
+
from mlxtend.preprocessing import TransactionEncoder
|
| 50 |
+
|
| 51 |
+
# ---------------- ROUTES ---------------- #
|
| 52 |
+
|
| 53 |
+
@app.route('/')
def index():
    """Render the landing page listing the available AI services."""
    return render_template('index.html')
|
| 56 |
+
|
| 57 |
+
# -------- GENDER CLASSIFICATION -------- #
|
| 58 |
+
# -------- GENDER CLASSIFICATION -------- #
@app.route('/gender', methods=['GET', 'POST'])
def gender():
    """Classify the gender in an uploaded face image.

    Prefers the transformers image-classification pipeline; falls back to the
    custom PyTorch CNN when the pipeline is unavailable. Renders gender.html
    with the result (or an error message) either way.
    """
    result = ""
    if request.method == 'POST':
        if 'image' not in request.files:
            return render_template('gender.html', result="No image uploaded")

        file = request.files['image']
        if file.filename == '':
            return render_template('gender.html', result="No image selected")

        if file:
            # Save file temporarily (use secure filename)
            filename = secure_filename(file.filename)
            filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            file.save(filepath)

            if gender_classifier:
                try:
                    img = Image.open(filepath)
                    results = gender_classifier(img)
                    # Extract the top result
                    result = results[0]['label'].capitalize()
                    print(f"Gender Classification Result: {result}")
                except Exception as e:
                    result = f"Error processing image with transformers: {e}"
            elif gender_model:
                try:
                    # Bug fix: `torch` was referenced here without being imported
                    # anywhere in app.py, so this fallback path always raised
                    # NameError. Import locally to keep startup imports light.
                    import torch

                    img = Image.open(filepath).convert('RGB')
                    img = img.resize((128, 128))
                    img_array = np.array(img).astype(np.float32) / 255.0
                    # Correct shape for PyTorch CNN: (batch, channels, height, width)
                    img_tensor = torch.from_numpy(img_array).permute(2, 0, 1).unsqueeze(0)

                    with torch.no_grad():
                        prediction = gender_model(img_tensor)
                    # Sigmoid output: > 0.5 is interpreted as Male.
                    result = "Male" if prediction.item() > 0.5 else "Female"
                except Exception as e:
                    import traceback
                    print(traceback.format_exc())
                    result = f"Error processing image: {e}"
            else:
                result = "Gender model is not loaded (check console for details)."

    return render_template('gender.html', result=result)
|
| 103 |
+
|
| 104 |
+
# -------- TEXT GENERATION -------- #
|
| 105 |
+
# -------- TEXT GENERATION -------- #
@app.route('/textgen', methods=['GET', 'POST'])
def textgen():
    """Generate a GPT-2 continuation of the submitted prompt."""
    result = ""
    if request.method == 'POST':
        # Fix: use .get() so a missing 'prompt' field falls back to "" instead
        # of raising a 400 BadRequest — consistent with the other routes
        # (e.g. /translate uses request.form.get('text', '')).
        text = request.form.get('prompt', '')
        if textgen_model:
            result = textgen_model(text, max_length=50)[0]['generated_text']
        else:
            result = "Text generation model not available"
    return render_template('textgen.html', generated_text=result)
|
| 115 |
+
|
| 116 |
+
# -------- TRANSLATION -------- #
|
| 117 |
+
# -------- TRANSLATION -------- #
@app.route('/translate', methods=['GET', 'POST'])
def translate():
    """Translate submitted English text to Urdu via the Helsinki-NLP pipeline."""
    result = ""
    if request.method == 'POST':
        source_text = request.form.get('text', '')
        if not translator:
            result = "Translation model not available"
        else:
            result = translator(source_text)[0]['translation_text']
    return render_template('translate.html', translated_text=result)
|
| 127 |
+
|
| 128 |
+
# -------- SENTIMENT (VOICE) -------- #
|
| 129 |
+
# -------- SENTIMENT (VOICE) -------- #
@app.route('/sentiment', methods=['GET', 'POST'])
def sentiment():
    """Run sentiment analysis on typed text or an uploaded voice recording.

    Typed text takes precedence; otherwise the uploaded audio is transcribed
    with the STT pipeline first. The result string also reports whether the
    input looks like a question or a statement.
    """
    result = ""
    if request.method == 'POST':
        typed = request.form.get('text', '').strip()
        voice = request.files.get('voice')

        # Resolve the text to analyze: typed input wins, then speech-to-text.
        text = ""
        if typed:
            text = typed
        elif voice:
            if voice.filename == '':
                return render_template('sentiment.html', result="No audio selected")
            saved_name = secure_filename(voice.filename)
            saved_path = os.path.join(app.config['UPLOAD_FOLDER'], saved_name)
            voice.save(saved_path)

            if stt_model is None:
                return render_template('sentiment.html', result="STT model not available")

            try:
                # Whisper pipeline expects a 1-D float32 array at 16 kHz.
                samples, _rate = librosa.load(saved_path, sr=16000)
                stt_output = stt_model(samples.astype(np.float32))
                text = stt_output.get('text', '').strip()
                if not text:
                    return render_template('sentiment.html', result="Could not understand audio")
            except Exception as e:
                return render_template('sentiment.html', result=f"STT processing error: {str(e)}")
        else:
            return render_template('sentiment.html', result="No input provided")

        # Sentiment Analysis Logic
        if sentiment_model is None:
            result = f"Analyzed: {text} | Status: Sentiment model not available"
        else:
            try:
                prediction = sentiment_model(text)[0]
                label = prediction.get('label', 'Unknown').capitalize()
                score = round(prediction.get('score', 0) * 100, 1)

                # Heuristic question detection: trailing '?' or an interrogative opener.
                openers = ["who", "what", "where", "when", "why", "how", "is", "are", "do", "does", "can", "could", "would", "should"]
                is_question = text.strip().endswith("?") or any(text.lower().startswith(w + " ") for w in openers)

                type_str = "Question" if is_question else "Statement"
                result = f"Text: \"{text}\" | Type: {type_str} | Sentiment: {label} (Confidence: {score}%)"
            except Exception as e:
                result = f"Sentiment analysis failed: {str(e)}"

    return render_template('sentiment.html', result=result)
|
| 181 |
+
|
| 182 |
+
# -------- QUESTION ANSWERING (VOICE → VOICE) -------- #
|
| 183 |
+
# -------- QUESTION ANSWERING (VOICE → VOICE) -------- #
@app.route('/qa', methods=['GET', 'POST'])
def qa():
    """Answer a typed or spoken question against user-provided context.

    A typed question takes precedence over the uploaded voice clip. When the
    QA model produces an answer, it is also synthesized (best-effort) to
    static/answer.mp3 via gTTS.
    """
    answer = ""
    context = ""
    question_text = ""
    if request.method == 'POST':
        context = request.form.get('context', '')
        audio_file = request.files.get('voice')
        typed_question = request.form.get('question', '').strip()

        if typed_question:
            question_text = typed_question
        elif audio_file:
            if audio_file.filename != '':
                audio_filename = secure_filename(audio_file.filename)
                audio_path = os.path.join(app.config['UPLOAD_FOLDER'], audio_filename)
                audio_file.save(audio_path)

                # Fix: check the STT model explicitly instead of relying on the
                # TypeError from calling None (consistent with /sentiment).
                if stt_model is None:
                    print("STT Error in QA: STT model not available")
                    answer = "Error processing your voice question."
                else:
                    try:
                        audio_array, sampling_rate = librosa.load(audio_path, sr=16000)
                        audio_array = audio_array.astype(np.float32)
                        stt_result = stt_model(audio_array)
                        question_text = stt_result.get('text', '').strip()
                    except Exception as e:
                        print(f"STT Error in QA: {e}")
                        answer = "Error processing your voice question."

        if not answer and question_text and context:
            if qa_model is None:
                answer = "Question-answering model not available"
            else:
                try:
                    result = qa_model(question=question_text, context=context)
                    answer = result.get('answer', str(result))

                    # Best-effort TTS; a failure here must not lose the answer.
                    try:
                        tts = gTTS(answer)
                        tts.save(os.path.join('static', 'answer.mp3'))
                    except Exception as e:
                        print(f"TTS failed: {e}")
                except Exception as e:
                    answer = f"QA model error: {e}"
        elif not answer:
            answer = "Please provide both context and a question (typed or voice)."

    return render_template('qa.html', answer=answer, context=context, question=question_text)
|
| 229 |
+
|
| 230 |
+
# -------- ZERO-SHOT LEARNING -------- #
|
| 231 |
+
# -------- ZERO-SHOT LEARNING -------- #
@app.route('/zsl', methods=['GET', 'POST'])
def zsl():
    """Zero-shot classify text against user-supplied comma-separated labels."""
    result = None
    if request.method == 'POST':
        text = request.form.get('text', '')
        labels = request.form.get('labels', '')

        if not text or not labels:
            return render_template('zsl.html', error="Both text and labels are required.")

        candidate_labels = [l.strip() for l in labels.split(',') if l.strip()]

        if zsl_model is None:
            return render_template('zsl.html', error="ZSL model not available.")

        try:
            output = zsl_model(text, candidate_labels=candidate_labels)
            # Fix: removed the dead `best_idx = np.argmax(output['scores'])`
            # local — the pipeline already returns labels/scores sorted
            # best-first, so index 0 is the top match.
            result = {
                'label': output['labels'][0],
                'score': round(output['scores'][0] * 100, 2),
                'all_results': zip(output['labels'], [round(s * 100, 2) for s in output['scores']])
            }
        except Exception as e:
            return render_template('zsl.html', error=f"ZSL error: {str(e)}")

    return render_template('zsl.html', result=result)
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
# -------- K-MEANS CLUSTERING -------- #
|
| 262 |
+
# -------- K-MEANS CLUSTERING -------- #
@app.route('/clustering', methods=['GET', 'POST'])
def clustering():
    """Cluster an uploaded CSV/Excel dataset's numeric columns with K-Means.

    Renders a base64 scatter plot of the first two numeric columns colored by
    cluster, plus a per-cluster row count.
    """
    plot_url = None
    cluster_info = None
    if request.method == 'POST':
        if 'file' not in request.files:
            return render_template('clustering.html', error="No file uploaded")

        file = request.files['file']
        n_clusters = int(request.form.get('clusters', 3))

        if file.filename == '':
            return render_template('clustering.html', error="No file selected")

        try:
            if file.filename.endswith('.csv'):
                df = pd.read_csv(file)
            else:
                df = pd.read_excel(file)

            # Keep only numeric columns
            numeric_df = df.select_dtypes(include=[np.number])

            if numeric_df.shape[1] < 2:
                return render_template('clustering.html', error="Dataset must have at least 2 numeric columns for clustering.")

            # Basic cleaning
            numeric_df = numeric_df.dropna()

            # Bug fix: keep labels separate from `df`. dropna() may shrink
            # numeric_df, so `df['Cluster'] = fit_predict(...)` raised a
            # length-mismatch ValueError whenever the file contained NaN rows.
            kmeans = KMeans(n_clusters=n_clusters, random_state=42)
            labels = kmeans.fit_predict(numeric_df)

            # Create Plot (using first two numeric columns)
            plt.figure(figsize=(10, 6))
            scatter = plt.scatter(numeric_df.iloc[:, 0], numeric_df.iloc[:, 1], c=labels, cmap='viridis', alpha=0.6)
            plt.colorbar(scatter, label='Cluster')
            plt.title(f'K-Means Clustering (K={n_clusters})')
            plt.xlabel(numeric_df.columns[0])
            plt.ylabel(numeric_df.columns[1])
            plt.grid(True, alpha=0.3)

            # Save plot to base64
            img = io.BytesIO()
            plt.savefig(img, format='png', bbox_inches='tight', transparent=True)
            img.seek(0)
            plot_url = base64.b64encode(img.getvalue()).decode()
            plt.close()

            # Cluster stats: row count per cluster label
            cluster_info = pd.Series(labels).value_counts().sort_index().to_dict()

        except Exception as e:
            return render_template('clustering.html', error=f"Clustering error: {str(e)}")

    return render_template('clustering.html', plot_url=plot_url, cluster_info=cluster_info)
|
| 318 |
+
|
| 319 |
+
# -------- DBSCAN CLUSTERING -------- #
|
| 320 |
+
# -------- DBSCAN CLUSTERING -------- #
@app.route('/dbscan', methods=['GET', 'POST'])
def dbscan():
    """Cluster an uploaded dataset with DBSCAN on standard-scaled features.

    Renders a base64 scatter plot of the first two numeric columns colored by
    cluster label (-1 = noise), plus a per-label row count.
    """
    plot_url = None
    cluster_info = None
    if request.method == 'POST':
        if 'file' not in request.files:
            return render_template('dbscan.html', error="No file uploaded")

        file = request.files['file']
        eps = float(request.form.get('eps', 0.5))
        min_samples = int(request.form.get('min_samples', 5))

        if file.filename == '':
            return render_template('dbscan.html', error="No file selected")

        try:
            if file.filename.endswith('.csv'):
                df = pd.read_csv(file)
            else:
                df = pd.read_excel(file)

            # Keep only numeric columns
            numeric_df = df.select_dtypes(include=[np.number])

            if numeric_df.shape[1] < 2:
                return render_template('dbscan.html', error="Dataset must have at least 2 numeric columns for clustering.")

            # Basic cleaning
            numeric_df = numeric_df.dropna()

            # DBSCAN with Scaling (eps is distance in scaled space)
            scaler = StandardScaler()
            scaled_data = scaler.fit_transform(numeric_df)

            # Bug fix: keep labels separate from `df`. dropna() may shrink
            # numeric_df, so `df['Cluster'] = fit_predict(...)` raised a
            # length-mismatch ValueError whenever the file contained NaN rows.
            dbscan_model = DBSCAN(eps=eps, min_samples=min_samples)
            labels = dbscan_model.fit_predict(scaled_data)

            # Create Plot
            plt.figure(figsize=(10, 6))
            scatter = plt.scatter(numeric_df.iloc[:, 0], numeric_df.iloc[:, 1], c=labels, cmap='viridis', alpha=0.6)
            plt.colorbar(scatter, label='Cluster')
            plt.title(f'DBSCAN Clustering (eps={eps}, min_samples={min_samples}) - Scaled')
            plt.xlabel(numeric_df.columns[0])
            plt.ylabel(numeric_df.columns[1])
            plt.grid(True, alpha=0.3)

            # Save plot to base64
            img = io.BytesIO()
            plt.savefig(img, format='png', bbox_inches='tight', transparent=True)
            img.seek(0)
            plot_url = base64.b64encode(img.getvalue()).decode()
            plt.close()

            # Cluster stats: row count per label (-1 = noise)
            cluster_info = pd.Series(labels).value_counts().sort_index().to_dict()

        except Exception as e:
            return render_template('dbscan.html', error=f"DBSCAN error: {str(e)}")

    return render_template('dbscan.html', plot_url=plot_url, cluster_info=cluster_info)
|
| 380 |
+
|
| 381 |
+
# -------- A-PRIORI ASSOCIATION RULES -------- #
|
| 382 |
+
# -------- A-PRIORI ASSOCIATION RULES -------- #
@app.route('/apriori', methods=['GET', 'POST'])
def apriori_route():
    """Mine association rules from an uploaded transactions file (CSV/Excel).

    Each spreadsheet row is treated as one transaction. Frequent itemsets are
    found with apriori at `min_support`, then rules are filtered by the chosen
    `metric` at `min_threshold` and rendered as a list of dicts.
    """
    rules_html = None
    if request.method == 'POST':
        if 'file' not in request.files:
            return render_template('apriori.html', error="No file uploaded")

        file = request.files['file']
        min_support = float(request.form.get('min_support', 0.1))
        min_threshold = float(request.form.get('min_threshold', 0.7))
        metric = request.form.get('metric', 'lift')
        has_header = request.form.get('has_header') == 'on'

        if file.filename == '':
            return render_template('apriori.html', error="No file selected")

        try:
            header_row = 0 if has_header else None
            if file.filename.endswith('.csv'):
                df = pd.read_csv(file, header=header_row)
            else:
                df = pd.read_excel(file, header=header_row)

            # Each row becomes one transaction: drop nulls/blanks, dedupe, sort.
            transactions = []
            for row in df.values.tolist():
                items = {str(cell).strip() for cell in row if pd.notna(cell) and str(cell).strip() != ''}
                if items:
                    transactions.append(sorted(items))

            if not transactions:
                return render_template('apriori.html', error="No valid transactions found in file.")

            # One-hot encode the transactions for mlxtend.
            encoder = TransactionEncoder()
            onehot = encoder.fit(transactions).transform(transactions)
            encoded_df = pd.DataFrame(onehot, columns=encoder.columns_)

            # Frequent itemsets, then rules above the chosen metric threshold.
            frequent_itemsets = apriori(encoded_df, min_support=min_support, use_colnames=True)
            if frequent_itemsets.empty:
                return render_template('apriori.html', error="No frequent itemsets found. Try lowering min support.")

            rules = association_rules(frequent_itemsets, metric=metric, min_threshold=min_threshold)
            if rules.empty:
                return render_template('apriori.html', error=f"No rules found for {metric} >= {min_threshold}. Try lowering threshold.")

            # frozensets -> lists so the template can render them.
            rules['antecedents'] = rules['antecedents'].apply(list)
            rules['consequents'] = rules['consequents'].apply(list)

            display_columns = ['antecedents', 'consequents', 'support', 'confidence', 'lift']
            rules_html = rules[display_columns].to_dict(orient='records')

        except Exception as e:
            import traceback
            print(traceback.format_exc())
            return render_template('apriori.html', error=f"A-priori error: {str(e)}")

    return render_template('apriori.html', rules=rules_html)
|
| 447 |
+
|
| 448 |
+
if __name__ == '__main__':
    # Local development entry point; production serves via gunicorn (see Dockerfile).
    app.run(debug=True, use_reloader=False)
|
models/gender_model.pth
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:eecaa25fc7866baa1f67478d0785f17621d9f1eb00bf29abe94a985617a92e62
|
| 3 |
+
size 16859283
|
models_loader.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from transformers import pipeline
|
| 2 |
+
import torch
|
| 3 |
+
import torch.nn as nn
|
| 4 |
+
import os
|
| 5 |
+
import numpy as np
|
| 6 |
+
from PIL import Image
|
| 7 |
+
|
| 8 |
+
class GenderCNN(nn.Module):
    """Binary gender classifier for 3x128x128 RGB input.

    Three Conv->ReLU->BatchNorm->MaxPool blocks followed by a small fully
    connected head ending in a sigmoid, so the output is a probability in
    [0, 1] of shape (batch, 1).
    """

    def __init__(self):
        super(GenderCNN, self).__init__()
        # Build the conv stack programmatically; the per-block layer order
        # (Conv, ReLU, BatchNorm, MaxPool) is fixed so the Sequential indices —
        # and therefore saved state_dict keys — keep matching existing weights.
        conv_blocks = []
        for in_ch, out_ch in ((3, 32), (32, 64), (64, 128)):
            conv_blocks += [
                nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1),
                nn.ReLU(),
                nn.BatchNorm2d(out_ch),
                nn.MaxPool2d(2, 2),
            ]
        self.conv_layers = nn.Sequential(*conv_blocks)
        # 128 channels at 16x16 after three 2x poolings of a 128x128 input.
        self.fc_layers = nn.Sequential(
            nn.Flatten(),
            nn.Linear(128 * 16 * 16, 256),
            nn.ReLU(),
            nn.Dropout(0.5),
            nn.Linear(256, 1),
            nn.Sigmoid(),
        )

    def forward(self, x):
        """Return a (batch, 1) sigmoid probability tensor."""
        return self.fc_layers(self.conv_layers(x))
|
| 38 |
+
|
| 39 |
+
class ModelLoader:
    """Process-wide singleton owning every ML model used by the app.

    The lightweight custom CNN is loaded eagerly at construction; each heavy
    transformers pipeline is built on first property access (lazy loading) to
    keep startup fast. A pipeline that fails to load resolves to None.
    """

    _instance = None  # the single shared instance

    def __new__(cls):
        # Classic singleton: construct and initialize exactly once.
        if cls._instance is None:
            cls._instance = super(ModelLoader, cls).__new__(cls)
            cls._instance._load_models()
        return cls._instance

    def _load_models(self):
        """Load the CNN weights and mark all pipelines as not-yet-loaded."""
        print("Initializing models...")
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        print(f"Using device: {self.device}")

        # CNN - Load immediately as it's lightweight
        print("Loading CNN model...")
        self.cnn_model = GenderCNN()
        model_path = "models/gender_model.pth"
        if os.path.exists(model_path):
            try:
                state_dict = torch.load(model_path, map_location=torch.device("cpu"))
                self.cnn_model.load_state_dict(state_dict)
                print("CNN model weights loaded.")
            except Exception as e:
                print(f"Error loading CNN weights: {e}. Model will use random initialization or fallback.")
        self.cnn_model.eval()

        # Pipelines start as None and are materialized on first use.
        print("Models initialized with lazy loading strategy.")
        self._sentiment_pipeline = None
        self._qa_pipeline = None
        self._text_gen_pipeline = None
        self._translator_pipeline = None
        self._stt_pipeline = None
        self._zsl_pipeline = None
        self._gender_classifier = None

    def _lazy(self, attr, message, task, model_name):
        """Build the pipeline on first access and memoize it on *attr*."""
        if getattr(self, attr) is None:
            print(message)
            setattr(self, attr, self._safe_pipeline(task, model=model_name))
        return getattr(self, attr)

    @property
    def sentiment_pipeline(self):
        return self._lazy("_sentiment_pipeline", "Loading Sentiment Analysis model...",
                          "sentiment-analysis", "cardiffnlp/twitter-roberta-base-sentiment-latest")

    @property
    def qa_pipeline(self):
        return self._lazy("_qa_pipeline", "Loading QA model...",
                          "question-answering", "distilbert-base-cased-distilled-squad")

    @property
    def text_gen_pipeline(self):
        return self._lazy("_text_gen_pipeline", "Loading Text Generation model...",
                          "text-generation", "gpt2")

    @property
    def translator_pipeline(self):
        return self._lazy("_translator_pipeline", "Loading Translation model...",
                          "translation", "Helsinki-NLP/opus-mt-en-ur")

    @property
    def stt_pipeline(self):
        return self._lazy("_stt_pipeline", "Loading STT model...",
                          "automatic-speech-recognition", "openai/whisper-base")

    @property
    def zsl_pipeline(self):
        return self._lazy("_zsl_pipeline", "Loading Zero-Shot Classification model...",
                          "zero-shot-classification", "facebook/bart-large-mnli")

    @property
    def gender_classifier(self):
        return self._lazy("_gender_classifier", "Loading Gender Classifier model...",
                          "image-classification", "prithivMLmods/Gender-Classifier-Mini")

    def _safe_pipeline(self, *args, **kwargs):
        """Build a transformers pipeline; return None instead of raising."""
        try:
            # Explicitly set device (0 for CUDA if available, -1 for CPU)
            device_index = 0 if self.device == "cuda" else -1
            return pipeline(*args, device=device_index, **kwargs)
        except Exception as e:
            print(f"Failed to load pipeline {args} {kwargs}: {e}")
            return None
|
| 157 |
+
|
| 158 |
+
# Singleton instance
|
| 159 |
+
loader = ModelLoader()
|
reproduction_apriori.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
from mlxtend.frequent_patterns import apriori, association_rules
|
| 3 |
+
from mlxtend.preprocessing import TransactionEncoder
|
| 4 |
+
import io
|
| 5 |
+
|
| 6 |
+
def test_apriori(csv_content, min_support=0.1, min_threshold=0.7, metric="lift"):
    """Mine association rules from raw CSV text and print the results.

    Diagnostic/reproduction helper: parses *csv_content* (one transaction per
    row, no header), runs mlxtend's Apriori at *min_support*, then derives
    rules filtered by *metric* >= *min_threshold*. Prints everything; returns
    nothing.
    """
    print(f"\n--- Testing with min_support={min_support}, min_threshold={min_threshold}, metric={metric} ---")
    df = pd.read_csv(io.StringIO(csv_content), header=None)

    # Each CSV row becomes one transaction; drop NaN cells and blank strings.
    transactions = []
    for row in df.values.tolist():
        items = [str(cell).strip() for cell in row if pd.notna(cell) and str(cell).strip() != '']
        if items:
            transactions.append(items)

    print(f"Transactions: {transactions}")

    # One-hot encode the transactions into the boolean frame apriori expects.
    encoder = TransactionEncoder()
    onehot = encoder.fit(transactions).transform(transactions)
    encoded_df = pd.DataFrame(onehot, columns=encoder.columns_)

    frequent_itemsets = apriori(encoded_df, min_support=min_support, use_colnames=True)
    if frequent_itemsets.empty:
        print("No frequent itemsets found.")
        return

    print(f"Frequent Itemsets:\n{frequent_itemsets}")

    rules = association_rules(frequent_itemsets, metric=metric, min_threshold=min_threshold)
    if rules.empty:
        print("No rules found.")
        return

    print(f"Rules Found: {len(rules)}")
    print(rules[['antecedents', 'consequents', 'support', 'confidence', 'lift']])
|
| 37 |
+
|
| 38 |
+
# Sample data: five market-basket transactions, one per CSV row, no header.
data = """Milk,Bread,Eggs
Milk,Bread
Milk,Eggs
Bread,Eggs
Milk,Bread,Eggs"""

# Test with current app logic (metric="lift")
test_apriori(data, metric="lift")

# Test with confidence (likely what user wants)
test_apriori(data, metric="confidence")

# Test with headers (to see if it breaks)
# NOTE(review): test_apriori always reads with header=None, so the first row
# "Item1,Item2,Item3" is treated as a real transaction here — this run is
# meant to demonstrate that failure mode.
data_with_header = """Item1,Item2,Item3
Milk,Bread,Eggs
Milk,Bread
Milk,Eggs
Bread,Eggs
Milk,Bread,Eggs"""
print("\n--- Testing with headers ---")
test_apriori(data_with_header, metric="lift")
|
reproduction_apriori_v2.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
from mlxtend.frequent_patterns import apriori, association_rules
|
| 3 |
+
from mlxtend.preprocessing import TransactionEncoder
|
| 4 |
+
import io
|
| 5 |
+
|
| 6 |
+
def test_apriori_improved(csv_content, min_support=0.1, min_threshold=0.7, metric="lift", has_header=False):
    """Improved Apriori reproduction: optional header row, de-duplicated items.

    Parses *csv_content* into transactions (skipping the first row when
    *has_header* is true), de-duplicates and sorts the items in each row, then
    mines frequent itemsets and rules with mlxtend. Prints summaries; returns
    nothing.
    """
    print(f"\n--- Testing: support={min_support}, threshold={min_threshold}, metric={metric}, header={has_header} ---")

    # Simulate the file reading logic in app.py
    df = pd.read_csv(io.StringIO(csv_content), header=0 if has_header else None)

    # Improved logic vs v1: de-dup with a set, sort for determinism, strip blanks.
    transactions = []
    for row in df.values.tolist():
        basket = sorted({str(cell).strip() for cell in row if pd.notna(cell) and str(cell).strip() != ''})
        if basket:
            transactions.append(basket)

    print(f"Transactions count: {len(transactions)}")

    encoder = TransactionEncoder()
    onehot = encoder.fit(transactions).transform(transactions)
    encoded_df = pd.DataFrame(onehot, columns=encoder.columns_)

    frequent_itemsets = apriori(encoded_df, min_support=min_support, use_colnames=True)
    if frequent_itemsets.empty:
        print("No frequent itemsets found.")
        return

    rules = association_rules(frequent_itemsets, metric=metric, min_threshold=min_threshold)
    if rules.empty:
        print("No rules found.")
        return

    print(f"Rules Found: {len(rules)}")
    print(rules[['antecedents', 'consequents', 'support', 'confidence', 'lift']].head())
|
| 38 |
+
|
| 39 |
+
# Sample data with header: first row is column names, followed by five baskets.
data_with_header = """Item1,Item2,Item3
Milk,Bread,Eggs
Milk,Bread
Milk,Eggs
Bread,Eggs
Milk,Bread,Eggs"""

# Test with header=True
test_apriori_improved(data_with_header, metric="confidence", has_header=True)

# Test with header=False (should see Item1, Item2, Item3 as products in transactions)
test_apriori_improved(data_with_header, metric="confidence", has_header=False)

# Test with lift
# min_threshold raised above 1.0 because lift == 1 means statistical independence.
test_apriori_improved(data_with_header, metric="lift", min_threshold=1.1, has_header=True)
|
requirements.txt
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
flask
|
| 2 |
+
transformers
|
| 3 |
+
torch
|
| 4 |
+
pillow
|
| 5 |
+
numpy
|
| 6 |
+
gtts
|
| 7 |
+
SpeechRecognition
|
| 8 |
+
sentencepiece
|
| 9 |
+
pandas
|
| 10 |
+
scikit-learn
|
| 11 |
+
matplotlib
|
| 12 |
+
librosa
|
| 13 |
+
mlxtend
|
| 14 |
+
sacremoses
|
| 15 |
+
scipy
|
| 16 |
+
soundfile
|
| 17 |
+
openpyxl
|
| 18 |
+
werkzeug
|
| 19 |
+
openai-whisper
|
| 20 |
+
gunicorn
|
scripts/setup_env.ps1
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# PowerShell script to create a venv and install requirements
# Run in the workspace root: .\scripts\setup_env.ps1

# Create virtual env (no-op if .venv already exists; python must be on PATH)
python -m venv .venv

# Activate venv in current session (PowerShell)
. .venv\Scripts\Activate.ps1

# Upgrade pip and install requirements
python -m pip install --upgrade pip
pip install -r requirements.txt

Write-Host "Setup complete. Activate with '. .venv\Scripts\Activate.ps1' and run 'python app.py' to start the server."
|
static/audio/question.wav
ADDED
|
Binary file (68.9 kB). View file
|
|
|
static/audio/recording.wav
ADDED
|
File without changes
|
static/css/main.css
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/* main.css — dashboard shell theme: design tokens, sidebar/main grid layout,
   glass-card and button components, fade-in animation, responsive collapse. */
:root {
    --bg-deep: #05070a;
    --sidebar-bg: #0a0d14;
    --card-bg: rgba(16, 20, 28, 0.7);
    --glass-border: rgba(255, 255, 255, 0.08);
    --accent-blue: #00d2ff;
    --accent-purple: #9d50bb;
    --text-primary: #f0f2f5;
    --text-secondary: #94a3b8;
    --quantum-gradient: linear-gradient(135deg, var(--accent-blue), var(--accent-purple));
    --sidebar-width: 280px;
    --radius-lg: 16px;
    --radius-md: 12px;
    --transition: all 0.3s cubic-bezier(0.4, 0, 0.2, 1);
}

/* Global reset + shared font stack */
* {
    margin: 0;
    padding: 0;
    box-sizing: border-box;
    font-family: 'Outfit', -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
}

body {
    background: var(--bg-deep);
    color: var(--text-primary);
    overflow-x: hidden;
    min-height: 100vh;
}

/* Layout Grid */
.dashboard-wrapper {
    display: grid;
    grid-template-columns: var(--sidebar-width) 1fr;
    min-height: 100vh;
}

/* Sidebar Styling */
.sidebar {
    background: var(--sidebar-bg);
    border-right: 1px solid var(--glass-border);
    padding: 2rem 1.5rem;
    display: flex;
    flex-direction: column;
    gap: 2rem;
    position: sticky;
    top: 0;
    height: 100vh;
}

.brand {
    display: flex;
    align-items: center;
    gap: 12px;
    padding-left: 0.5rem;
}

.brand-icon {
    width: 40px;
    height: 40px;
    background: var(--quantum-gradient);
    border-radius: 10px;
    display: grid;
    place-items: center;
    font-weight: 800;
    color: white;
}

.brand-name {
    font-size: 1.25rem;
    font-weight: 700;
    letter-spacing: -0.5px;
}

.nav-menu {
    list-style: none;
    display: flex;
    flex-direction: column;
    gap: 0.5rem;
}

.nav-link {
    display: flex;
    align-items: center;
    gap: 12px;
    padding: 0.75rem 1rem;
    color: var(--text-secondary);
    text-decoration: none;
    border-radius: var(--radius-md);
    transition: var(--transition);
    font-weight: 500;
}

.nav-link:hover {
    background: rgba(255, 255, 255, 0.05);
    color: var(--text-primary);
}

.nav-link.active {
    background: rgba(0, 210, 255, 0.1);
    color: var(--accent-blue);
}

/* Main Content area */
.main-content {
    padding: 2rem;
    background: radial-gradient(circle at top right, rgba(0, 210, 255, 0.05), transparent 40%);
}

.top-bar {
    display: flex;
    justify-content: space-between;
    align-items: center;
    margin-bottom: 2.5rem;
}

.page-title h1 {
    font-size: 2rem;
    font-weight: 700;
    margin-bottom: 4px;
}

.page-title p {
    color: var(--text-secondary);
}

/* Reusable Components */
.glass-card {
    background: var(--card-bg);
    backdrop-filter: blur(12px);
    border: 1px solid var(--glass-border);
    border-radius: var(--radius-lg);
    padding: 1.5rem;
    transition: var(--transition);
}

.glass-card:hover {
    border-color: rgba(255, 255, 255, 0.15);
    transform: translateY(-4px);
}

.btn-quantum {
    background: var(--quantum-gradient);
    color: white;
    padding: 0.75rem 1.5rem;
    border: none;
    border-radius: var(--radius-md);
    font-weight: 600;
    cursor: pointer;
    transition: var(--transition);
    text-decoration: none;
    display: inline-flex;
    align-items: center;
    gap: 8px;
}

.btn-quantum:hover {
    opacity: 0.9;
    box-shadow: 0 0 20px rgba(0, 210, 255, 0.3);
}

/* Animations */
@keyframes fadeIn {
    from { opacity: 0; transform: translateY(10px); }
    to { opacity: 1; transform: translateY(0); }
}

.animate-fade-in {
    animation: fadeIn 0.5s ease forwards;
}

/* Responsive: collapse sidebar to an icon rail below 1024px */
@media (max-width: 1024px) {
    .dashboard-wrapper {
        grid-template-columns: 80px 1fr;
    }
    .brand-name, .nav-text {
        display: none;
    }
    .sidebar {
        padding: 2rem 1rem;
        align-items: center;
    }
}
|
static/css/style.css
ADDED
|
@@ -0,0 +1,461 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/* style.css — landing/tool-page theme: glassmorphism cards, forms, recorder,
   loading overlay, image preview, confidence meter and responsive tweaks. */
:root {
    --primary: #6366f1;
    --primary-hover: #4f46e5;
    --secondary: #c084fc;
    --bg-dark: #070b14;
    --card-bg: rgba(30, 41, 59, 0.4);
    --text-main: #f8fafc;
    --text-dim: #94a3b8;
    --glass-border: rgba(255, 255, 255, 0.08);
    --glow: rgba(99, 102, 241, 0.15);
}

* {
    box-sizing: border-box;
    margin: 0;
    padding: 0;
}

body {
    font-family: 'Inter', system-ui, -apple-system, sans-serif;
    background: var(--bg-dark);
    color: var(--text-main);
    min-height: 100vh;
    overflow-x: hidden;
    background-image:
        radial-gradient(circle at 20% 20%, rgba(99, 102, 241, 0.1) 0%, transparent 40%),
        radial-gradient(circle at 80% 80%, rgba(192, 132, 252, 0.1) 0%, transparent 40%);
}

/* Background animated glow */
/* NOTE(review): depends on an external SVG host; consider vendoring the asset. */
body::before {
    content: '';
    position: fixed;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    background: url('https://grainy-gradients.vercel.app/noise.svg');
    opacity: 0.15;
    pointer-events: none;
    z-index: 0;
}

.container {
    position: relative;
    z-index: 1;
    max-width: 1100px;
    margin: 0 auto;
    padding: 3rem 1.5rem;
}

header {
    text-align: center;
    padding-bottom: 4rem;
    animation: fadeInDown 0.8s ease-out;
}

@keyframes fadeInDown {
    from {
        opacity: 0;
        transform: translateY(-20px);
    }

    to {
        opacity: 1;
        transform: translateY(0);
    }
}

h1 {
    font-size: clamp(2.5rem, 8vw, 4.5rem);
    font-weight: 900;
    letter-spacing: -0.02em;
    background: linear-gradient(135deg, #818cf8 0%, #c084fc 100%);
    -webkit-background-clip: text;
    background-clip: text;
    -webkit-text-fill-color: transparent;
    margin-bottom: 1rem;
    filter: drop-shadow(0 0 10px rgba(129, 140, 248, 0.3));
}

.subtitle {
    color: var(--text-dim);
    font-size: 1.25rem;
    max-width: 600px;
    margin: 0 auto;
    line-height: 1.6;
}

/* Grid Layout */
.grid {
    display: grid;
    grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
    gap: 2rem;
    margin-top: 2rem;
}

.card {
    background: var(--card-bg);
    backdrop-filter: blur(16px);
    -webkit-backdrop-filter: blur(16px);
    border: 1px solid var(--glass-border);
    border-radius: 2rem;
    padding: 2.5rem;
    transition: all 0.4s cubic-bezier(0.23, 1, 0.32, 1);
    text-decoration: none;
    color: inherit;
    display: flex;
    flex-direction: column;
    align-items: center;
    text-align: center;
    position: relative;
    overflow: hidden;
}

.card::after {
    content: '';
    position: absolute;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    background: linear-gradient(135deg, rgba(255, 255, 255, 0.05) 0%, transparent 100%);
    pointer-events: none;
}

.card:hover {
    transform: translateY(-12px) scale(1.02);
    border-color: rgba(99, 102, 241, 0.3);
    box-shadow: 0 25px 50px -12px rgba(0, 0, 0, 0.5), 0 0 20px var(--glow);
    background: rgba(30, 41, 59, 0.6);
}

.card i {
    font-size: 3rem;
    margin-bottom: 1.5rem;
    background: linear-gradient(135deg, #6366f1, #a855f7);
    -webkit-background-clip: text;
    background-clip: text;
    -webkit-text-fill-color: transparent;
    transition: transform 0.3s ease;
}

.card:hover i {
    transform: scale(1.1) rotate(5deg);
}

.card h3 {
    margin-bottom: 0.75rem;
    font-size: 1.75rem;
    font-weight: 700;
}

.card p {
    color: var(--text-dim);
    font-size: 1rem;
    line-height: 1.6;
}

/* Form Styles */
.form-card {
    background: var(--card-bg);
    backdrop-filter: blur(20px);
    border: 1px solid var(--glass-border);
    border-radius: 2.5rem;
    padding: 3rem;
    max-width: 700px;
    margin: 3rem auto;
    box-shadow: 0 40px 100px -20px rgba(0, 0, 0, 0.4);
}

.form-group {
    margin-bottom: 2rem;
}

label {
    display: block;
    margin-bottom: 0.75rem;
    font-weight: 600;
    color: var(--text-main);
    font-size: 1rem;
}

textarea,
input[type="text"],
input[type="file"] {
    width: 100%;
    background: rgba(15, 23, 42, 0.6);
    border: 1px solid var(--glass-border);
    border-radius: 1rem;
    padding: 1.25rem;
    color: white;
    font-family: inherit;
    font-size: 1rem;
    transition: all 0.3s;
}

textarea:focus,
input[type="text"]:focus {
    outline: none;
    border-color: var(--primary);
    background: rgba(15, 23, 42, 0.8);
    box-shadow: 0 0 0 4px rgba(99, 102, 241, 0.1);
}

button {
    background: linear-gradient(135deg, var(--primary) 0%, var(--secondary) 100%);
    color: white;
    border: none;
    padding: 1.25rem 2rem;
    border-radius: 1rem;
    font-weight: 700;
    font-size: 1.1rem;
    cursor: pointer;
    transition: all 0.3s;
    width: 100%;
    display: flex;
    align-items: center;
    justify-content: center;
    gap: 0.75rem;
    box-shadow: 0 10px 15px -3px rgba(99, 102, 241, 0.3);
}

button:hover {
    transform: translateY(-2px);
    box-shadow: 0 20px 25px -5px rgba(99, 102, 241, 0.4);
    filter: brightness(1.1);
}

button:active {
    transform: translateY(0);
}

/* Results Section */
.result-box {
    margin-top: 3rem;
    padding: 2.5rem;
    background: linear-gradient(135deg, rgba(99, 102, 241, 0.1), rgba(192, 132, 252, 0.1));
    border: 1px solid rgba(99, 102, 241, 0.2);
    border-radius: 1.5rem;
    animation: slideUp 0.5s ease-out;
}

@keyframes slideUp {
    from {
        opacity: 0;
        transform: translateY(20px);
    }

    to {
        opacity: 1;
        transform: translateY(0);
    }
}

.back-link {
    display: inline-flex;
    align-items: center;
    gap: 0.5rem;
    margin-bottom: 2rem;
    color: var(--text-dim);
    text-decoration: none;
    font-weight: 500;
    transition: color 0.2s;
}

.back-link:hover {
    color: var(--text-main);
}

/* Voice Recorder UI */
.recorder-container {
    display: flex;
    flex-direction: column;
    align-items: center;
    gap: 1.5rem;
    margin-bottom: 2rem;
}

.record-btn {
    width: 80px;
    height: 80px;
    border-radius: 50%;
    background: #ef4444;
    border: 6px solid rgba(239, 68, 68, 0.2);
    color: white;
    display: flex;
    align-items: center;
    justify-content: center;
    cursor: pointer;
    transition: all 0.3s;
    font-size: 1.5rem;
}

.record-btn.recording {
    animation: pulse 1.5s infinite;
    background: #dc2626;
    border-radius: 1rem;
}

@keyframes pulse {
    0% {
        transform: scale(1);
        box-shadow: 0 0 0 0 rgba(239, 68, 68, 0.7);
    }

    70% {
        transform: scale(1.05);
        box-shadow: 0 0 0 20px rgba(239, 68, 68, 0);
    }

    100% {
        transform: scale(1);
        box-shadow: 0 0 0 0 rgba(239, 68, 68, 0);
    }
}

/* Loading Overlay */
#loading-overlay {
    position: fixed;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    background: rgba(7, 11, 20, 0.8);
    backdrop-filter: blur(8px);
    display: none;
    flex-direction: column;
    align-items: center;
    justify-content: center;
    z-index: 1000;
}

.loader {
    width: 60px;
    height: 60px;
    border: 5px solid rgba(99, 102, 241, 0.1);
    border-top: 5px solid var(--primary);
    border-radius: 50%;
    animation: spin 1s linear infinite;
    margin-bottom: 1.5rem;
}

@keyframes spin {
    0% {
        transform: rotate(0deg);
    }

    100% {
        transform: rotate(360deg);
    }
}

/* Image Preview */
#preview-container {
    width: 100%;
    height: 300px;
    border-radius: 1.5rem;
    overflow: hidden;
    margin-bottom: 1.5rem;
    display: none;
    border: 2px dashed var(--glass-border);
}

#preview-img {
    width: 100%;
    height: 100%;
    object-fit: cover;
}

/* Confidence Meter */
.confidence-container {
    margin-top: 1.5rem;
    text-align: left;
}

.confidence-bar {
    background: rgba(15, 23, 42, 0.4);
    border-radius: 1rem;
    height: 12px;
    width: 100%;
    overflow: hidden;
}

.confidence-bar {
    margin: 0.5rem 0;
}

.confidence-fill {
    background: linear-gradient(90deg, var(--primary), var(--secondary));
    height: 100%;
    transition: width 1s cubic-bezier(0.34, 1.56, 0.64, 1);
}

/* Copy Button */
.copy-btn {
    background: rgba(255, 255, 255, 0.05);
    border: 1px solid var(--glass-border);
    color: var(--text-dim);
    padding: 0.5rem 1rem;
    border-radius: 0.75rem;
    font-size: 0.875rem;
    cursor: pointer;
    display: inline-flex;
    align-items: center;
    gap: 0.5rem;
    transition: all 0.2s;
    margin-bottom: 1rem;
    width: auto;
}

.copy-btn:hover {
    background: rgba(255, 255, 255, 0.1);
    color: var(--text-main);
}

/* Range Slider */
.slider-container {
    margin-bottom: 2rem;
}

input[type="range"] {
    -webkit-appearance: none;
    width: 100%;
    height: 6px;
    background: rgba(15, 23, 42, 0.6);
    border-radius: 5px;
    outline: none;
    margin: 1rem 0;
}

input[type="range"]::-webkit-slider-thumb {
    -webkit-appearance: none;
    appearance: none;
    width: 20px;
    height: 20px;
    background: var(--primary);
    border-radius: 50%;
    cursor: pointer;
    box-shadow: 0 0 10px rgba(99, 102, 241, 0.5);
}

/* Floating Actions */
.floating-actions {
    display: flex;
    justify-content: flex-end;
    gap: 0.5rem;
}

/* Responsive */
@media (max-width: 640px) {
    .container {
        padding: 1.5rem;
    }

    .form-card {
        padding: 1.5rem;
        border-radius: 1.5rem;
    }

    h1 {
        font-size: 2.5rem;
    }
}
|
static/style.css
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/* Google Fonts */
|
| 2 |
+
@import url('https://fonts.googleapis.com/css2?family=Outfit:wght@300;400;600&display=swap');
|
| 3 |
+
|
| 4 |
+
:root {
|
| 5 |
+
--bg-color: #0f172a;
|
| 6 |
+
--card-bg: #1e293b;
|
| 7 |
+
--text-color: #f1f5f9;
|
| 8 |
+
--accent-color: #3b82f6;
|
| 9 |
+
--accent-hover: #2563eb;
|
| 10 |
+
--gradient-1: linear-gradient(135deg, #3b82f6 0%, #8b5cf6 100%);
|
| 11 |
+
--glass-bg: rgba(30, 41, 59, 0.7);
|
| 12 |
+
--border-color: rgba(255, 255, 255, 0.1);
|
| 13 |
+
--container-max: 960px;
|
| 14 |
+
}
|
| 15 |
+
|
| 16 |
+
* {
|
| 17 |
+
margin: 0;
|
| 18 |
+
padding: 0;
|
| 19 |
+
box-sizing: border-box;
|
| 20 |
+
font-family: 'Outfit', sans-serif;
|
| 21 |
+
}
|
| 22 |
+
|
| 23 |
+
body {
|
| 24 |
+
background-color: var(--bg-color);
|
| 25 |
+
color: var(--text-color);
|
| 26 |
+
min-height: 100vh;
|
| 27 |
+
display: flex;
|
| 28 |
+
flex-direction: column;
|
| 29 |
+
align-items: center;
|
| 30 |
+
padding: 2rem;
|
| 31 |
+
}
|
| 32 |
+
|
| 33 |
+
h1 {
|
| 34 |
+
font-size: 2rem;
|
| 35 |
+
font-weight: 600;
|
| 36 |
+
margin-bottom: 2rem;
|
| 37 |
+
text-align: center;
|
| 38 |
+
background: var(--gradient-1);
|
| 39 |
+
-webkit-background-clip: text;
|
| 40 |
+
-webkit-text-fill-color: transparent;
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
.container {
|
| 44 |
+
width: 100%;
|
| 45 |
+
max-width: var(--container-max);
|
| 46 |
+
display: grid;
|
| 47 |
+
grid-template-columns: repeat(auto-fit, minmax(300px, 1fr));
|
| 48 |
+
gap: 2rem;
|
| 49 |
+
}
|
| 50 |
+
|
| 51 |
+
/* Service Cards (Index) */
|
| 52 |
+
.card {
|
| 53 |
+
background: var(--card-bg);
|
| 54 |
+
padding: 2rem;
|
| 55 |
+
border-radius: 1rem;
|
| 56 |
+
border: 1px solid var(--border-color);
|
| 57 |
+
transition: transform 0.3s ease, box-shadow 0.3s ease;
|
| 58 |
+
text-decoration: none;
|
| 59 |
+
color: inherit;
|
| 60 |
+
display: flex;
|
| 61 |
+
flex-direction: column;
|
| 62 |
+
align-items: center;
|
| 63 |
+
justify-content: center;
|
| 64 |
+
text-align: center;
|
| 65 |
+
}
|
| 66 |
+
|
| 67 |
+
.card:hover {
|
| 68 |
+
transform: translateY(-5px);
|
| 69 |
+
box-shadow: 0 10px 20px rgba(0, 0, 0, 0.3);
|
| 70 |
+
border-color: var(--accent-color);
|
| 71 |
+
}
|
| 72 |
+
|
| 73 |
+
.card h2 {
|
| 74 |
+
font-size: 1.5rem;
|
| 75 |
+
margin-bottom: 0.5rem;
|
| 76 |
+
}
|
| 77 |
+
|
| 78 |
+
.card p {
|
| 79 |
+
color: #94a3b8;
|
| 80 |
+
font-size: 0.9rem;
|
| 81 |
+
}
|
| 82 |
+
|
| 83 |
+
/* Forms & Service Pages */
|
| 84 |
+
.service-container {
|
| 85 |
+
background: var(--card-bg);
|
| 86 |
+
padding: 3rem;
|
| 87 |
+
border-radius: 1rem;
|
| 88 |
+
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
|
| 89 |
+
width: 100%;
|
| 90 |
+
max-width: 600px;
|
| 91 |
+
border: 1px solid var(--border-color);
|
| 92 |
+
}
|
| 93 |
+
|
| 94 |
+
.service-title {
|
| 95 |
+
margin-bottom: 1.5rem;
|
| 96 |
+
text-align: center;
|
| 97 |
+
}
|
| 98 |
+
|
| 99 |
+
label {
|
| 100 |
+
display: block;
|
| 101 |
+
margin-bottom: 0.5rem;
|
| 102 |
+
color: #cbd5e1;
|
| 103 |
+
}
|
| 104 |
+
|
| 105 |
+
input[type="text"],
|
| 106 |
+
input[type="file"],
|
| 107 |
+
textarea {
|
| 108 |
+
width: 100%;
|
| 109 |
+
padding: 0.75rem;
|
| 110 |
+
margin-bottom: 1.5rem;
|
| 111 |
+
background: var(--bg-color);
|
| 112 |
+
border: 1px solid var(--border-color);
|
| 113 |
+
border-radius: 0.5rem;
|
| 114 |
+
color: var(--text-color);
|
| 115 |
+
font-size: 1rem;
|
| 116 |
+
}
|
| 117 |
+
|
| 118 |
+
input[type="file"]::file-selector-button {
|
| 119 |
+
background: var(--card-bg);
|
| 120 |
+
color: var(--text-color);
|
| 121 |
+
padding: 0.5rem 1rem;
|
| 122 |
+
border: 1px solid var(--border-color);
|
| 123 |
+
border-radius: 0.3rem;
|
| 124 |
+
cursor: pointer;
|
| 125 |
+
margin-right: 1rem;
|
| 126 |
+
}
|
| 127 |
+
|
| 128 |
+
button {
|
| 129 |
+
width: 100%;
|
| 130 |
+
padding: 0.6rem;
|
| 131 |
+
background: var(--gradient-1);
|
| 132 |
+
color: white;
|
| 133 |
+
border: none;
|
| 134 |
+
border-radius: 0.5rem;
|
| 135 |
+
font-size: 0.98rem;
|
| 136 |
+
font-weight: 600;
|
| 137 |
+
cursor: pointer;
|
| 138 |
+
transition: filter 0.18s ease;
|
| 139 |
+
}
|
| 140 |
+
|
| 141 |
+
button:hover {
|
| 142 |
+
filter: brightness(1.1);
|
| 143 |
+
}
|
| 144 |
+
|
| 145 |
+
.result-box {
|
| 146 |
+
margin-top: 2rem;
|
| 147 |
+
padding: 1rem;
|
| 148 |
+
background: rgba(59, 130, 246, 0.06);
|
| 149 |
+
border: 1px solid rgba(59,130,246,0.12);
|
| 150 |
+
border-radius: 0.5rem;
|
| 151 |
+
text-align: center;
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
.back-link {
|
| 155 |
+
display: block;
|
| 156 |
+
margin-top: 1.5rem;
|
| 157 |
+
text-align: center;
|
| 158 |
+
color: var(--accent-color);
|
| 159 |
+
text-decoration: none;
|
| 160 |
+
}
|
| 161 |
+
|
| 162 |
+
.back-link:hover {
|
| 163 |
+
text-decoration: underline;
|
| 164 |
+
}
|
| 165 |
+
|
| 166 |
+
audio {
|
| 167 |
+
width: 100%;
|
| 168 |
+
margin-top: 10px;
|
| 169 |
+
}
|
| 170 |
+
|
| 171 |
+
/* Make images responsive and add subtle rounding */
|
| 172 |
+
img {
|
| 173 |
+
max-width: 100%;
|
| 174 |
+
height: auto;
|
| 175 |
+
display: block;
|
| 176 |
+
border-radius: 0.5rem;
|
| 177 |
+
}
|
| 178 |
+
|
| 179 |
+
/* Improve focus styles for accessibility */
|
| 180 |
+
input[type="text"],
|
| 181 |
+
input[type="file"],
|
| 182 |
+
textarea {
|
| 183 |
+
transition: box-shadow 0.15s ease, border-color 0.15s ease;
|
| 184 |
+
}
|
| 185 |
+
|
| 186 |
+
input[type="text"]:focus,
|
| 187 |
+
input[type="file"]:focus,
|
| 188 |
+
textarea:focus {
|
| 189 |
+
outline: none;
|
| 190 |
+
box-shadow: 0 0 0 4px rgba(59,130,246,0.08);
|
| 191 |
+
border-color: var(--accent-color);
|
| 192 |
+
}
|
templates/apriori.html
ADDED
|
@@ -0,0 +1,209 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% extends "layout.html" %}
|
| 2 |
+
|
| 3 |
+
{% block title %}Association Rules - Quantum AI{% endblock %}
|
| 4 |
+
|
| 5 |
+
{% block page_header %}
|
| 6 |
+
<h1>Association Rules</h1>
|
| 7 |
+
<p>Discover hidden relationships in transactional datasets using the A-priori algorithm.</p>
|
| 8 |
+
{% endblock %}
|
| 9 |
+
|
| 10 |
+
{% block content %}
|
| 11 |
+
<div class="service-panel glass-card">
|
| 12 |
+
<form action="/apriori" method="post" enctype="multipart/form-data" class="quantum-form">
|
| 13 |
+
<div class="upload-zone" id="dropZone">
|
| 14 |
+
<i class="fas fa-shopping-cart upload-icon"></i>
|
| 15 |
+
<div class="upload-text">
|
| 16 |
+
<strong>Upload Transaction Data</strong>
|
| 17 |
+
<span>Supports .CSV or .XLSX (Each row is a transaction)</span>
|
| 18 |
+
</div>
|
| 19 |
+
<input type="file" name="file" accept=".csv, .xlsx" required>
|
| 20 |
+
</div>
|
| 21 |
+
|
| 22 |
+
<div class="settings-grid">
|
| 23 |
+
<div class="input-group">
|
| 24 |
+
<label>Metric</label>
|
| 25 |
+
<select name="metric" class="quantum-select">
|
| 26 |
+
<option value="lift" selected>Lift</option>
|
| 27 |
+
<option value="confidence">Confidence</option>
|
| 28 |
+
</select>
|
| 29 |
+
</div>
|
| 30 |
+
<div class="input-group">
|
| 31 |
+
<label>Min Support</label>
|
| 32 |
+
<input type="number" name="min_support" value="0.1" min="0.01" max="1" step="0.01" required>
|
| 33 |
+
</div>
|
| 34 |
+
<div class="input-group">
|
| 35 |
+
<label>Min Threshold (Confidence/Lift)</label>
|
| 36 |
+
<input type="number" name="min_threshold" value="0.7" min="0.1" max="10" step="0.1" required>
|
| 37 |
+
</div>
|
| 38 |
+
<div class="input-group checkbox-group">
|
| 39 |
+
<label class="checkbox-container">
|
| 40 |
+
<input type="checkbox" name="has_header">
|
| 41 |
+
<span class="checkmark"></span>
|
| 42 |
+
File has header row
|
| 43 |
+
</label>
|
| 44 |
+
</div>
|
| 45 |
+
</div>
|
| 46 |
+
|
| 47 |
+
<button type="submit" class="btn-quantum full-width">
|
| 48 |
+
<i class="fas fa-magic"></i> Generate Rules
|
| 49 |
+
</button>
|
| 50 |
+
</form>
|
| 51 |
+
|
| 52 |
+
{% if error %}
|
| 53 |
+
<div class="error-msg animate-fade-in">
|
| 54 |
+
<i class="fas fa-exclamation-triangle"></i> {{ error }}
|
| 55 |
+
</div>
|
| 56 |
+
{% endif %}
|
| 57 |
+
|
| 58 |
+
{% if rules %}
|
| 59 |
+
<div class="result-container animate-fade-in">
|
| 60 |
+
<div class="result-header">Mining Results: {{ rules|length }} rules discovered</div>
|
| 61 |
+
|
| 62 |
+
<div class="table-container">
|
| 63 |
+
<table class="quantum-table">
|
| 64 |
+
<thead>
|
| 65 |
+
<tr>
|
| 66 |
+
<th>Antecedents (If)</th>
|
| 67 |
+
<th>Consequents (Then)</th>
|
| 68 |
+
<th>Support</th>
|
| 69 |
+
<th>Confidence</th>
|
| 70 |
+
<th>Lift</th>
|
| 71 |
+
</tr>
|
| 72 |
+
</thead>
|
| 73 |
+
<tbody>
|
| 74 |
+
{% for rule in rules %}
|
| 75 |
+
<tr>
|
| 76 |
+
<td><span class="tag">{{ rule.antecedents | join(', ') }}</span></td>
|
| 77 |
+
<td><span class="tag consequent">{{ rule.consequents | join(', ') }}</span></td>
|
| 78 |
+
<td>{{ "%.4f"|format(rule.support) }}</td>
|
| 79 |
+
<td>{{ "%.4f"|format(rule.confidence) }}</td>
|
| 80 |
+
<td>{{ "%.4f"|format(rule.lift) }}</td>
|
| 81 |
+
</tr>
|
| 82 |
+
{% endfor %}
|
| 83 |
+
</tbody>
|
| 84 |
+
</table>
|
| 85 |
+
</div>
|
| 86 |
+
</div>
|
| 87 |
+
{% endif %}
|
| 88 |
+
</div>
|
| 89 |
+
|
| 90 |
+
<style>
|
| 91 |
+
.service-panel {
|
| 92 |
+
max-width: 1000px;
|
| 93 |
+
margin: 0 auto;
|
| 94 |
+
}
|
| 95 |
+
|
| 96 |
+
.settings-grid {
|
| 97 |
+
display: grid;
|
| 98 |
+
grid-template-columns: 1fr 1fr;
|
| 99 |
+
gap: 1.5rem;
|
| 100 |
+
margin-top: 1.5rem;
|
| 101 |
+
}
|
| 102 |
+
|
| 103 |
+
.input-group label {
|
| 104 |
+
font-size: 0.85rem;
|
| 105 |
+
font-weight: 700;
|
| 106 |
+
color: var(--text-secondary);
|
| 107 |
+
text-transform: uppercase;
|
| 108 |
+
margin-bottom: 0.5rem;
|
| 109 |
+
display: block;
|
| 110 |
+
}
|
| 111 |
+
|
| 112 |
+
input[type="number"] {
|
| 113 |
+
background: rgba(0, 0, 0, 0.2);
|
| 114 |
+
border: 1px solid var(--glass-border);
|
| 115 |
+
border-radius: var(--radius-md);
|
| 116 |
+
padding: 0.75rem 1rem;
|
| 117 |
+
color: var(--text-primary);
|
| 118 |
+
font-size: 1rem;
|
| 119 |
+
width: 100%;
|
| 120 |
+
}
|
| 121 |
+
|
| 122 |
+
.table-container {
|
| 123 |
+
margin-top: 1.5rem;
|
| 124 |
+
overflow-x: auto;
|
| 125 |
+
background: rgba(0, 0, 0, 0.2);
|
| 126 |
+
border-radius: var(--radius-lg);
|
| 127 |
+
border: 1px solid var(--glass-border);
|
| 128 |
+
}
|
| 129 |
+
|
| 130 |
+
.quantum-table {
|
| 131 |
+
width: 100%;
|
| 132 |
+
border-collapse: collapse;
|
| 133 |
+
text-align: left;
|
| 134 |
+
}
|
| 135 |
+
|
| 136 |
+
.quantum-table th {
|
| 137 |
+
padding: 1rem;
|
| 138 |
+
background: rgba(255, 255, 255, 0.05);
|
| 139 |
+
color: var(--text-secondary);
|
| 140 |
+
font-size: 0.8rem;
|
| 141 |
+
text-transform: uppercase;
|
| 142 |
+
letter-spacing: 1px;
|
| 143 |
+
}
|
| 144 |
+
|
| 145 |
+
.quantum-table td {
|
| 146 |
+
padding: 1rem;
|
| 147 |
+
border-top: 1px solid var(--glass-border);
|
| 148 |
+
font-size: 0.9rem;
|
| 149 |
+
}
|
| 150 |
+
|
| 151 |
+
.tag {
|
| 152 |
+
background: rgba(59, 130, 246, 0.2);
|
| 153 |
+
color: #60a5fa;
|
| 154 |
+
padding: 2px 8px;
|
| 155 |
+
border-radius: 4px;
|
| 156 |
+
font-size: 0.8rem;
|
| 157 |
+
font-weight: 600;
|
| 158 |
+
}
|
| 159 |
+
|
| 160 |
+
.tag.consequent {
|
| 161 |
+
background: rgba(168, 85, 247, 0.2);
|
| 162 |
+
color: #c084fc;
|
| 163 |
+
}
|
| 164 |
+
|
| 165 |
+
.quantum-select {
|
| 166 |
+
background: rgba(0, 0, 0, 0.2);
|
| 167 |
+
border: 1px solid var(--glass-border);
|
| 168 |
+
border-radius: var(--radius-md);
|
| 169 |
+
padding: 0.75rem 1rem;
|
| 170 |
+
color: var(--text-primary);
|
| 171 |
+
font-size: 1rem;
|
| 172 |
+
width: 100%;
|
| 173 |
+
cursor: pointer;
|
| 174 |
+
}
|
| 175 |
+
|
| 176 |
+
.quantum-select option {
|
| 177 |
+
background: #1a1a2e;
|
| 178 |
+
color: white;
|
| 179 |
+
}
|
| 180 |
+
|
| 181 |
+
.checkbox-group {
|
| 182 |
+
display: flex;
|
| 183 |
+
align-items: flex-end;
|
| 184 |
+
}
|
| 185 |
+
|
| 186 |
+
.checkbox-container {
|
| 187 |
+
display: flex;
|
| 188 |
+
align-items: center;
|
| 189 |
+
cursor: pointer;
|
| 190 |
+
font-size: 0.9rem;
|
| 191 |
+
color: var(--text-secondary);
|
| 192 |
+
user-select: none;
|
| 193 |
+
}
|
| 194 |
+
|
| 195 |
+
.checkbox-container input {
|
| 196 |
+
margin-right: 10px;
|
| 197 |
+
width: 18px;
|
| 198 |
+
height: 18px;
|
| 199 |
+
}
|
| 200 |
+
|
| 201 |
+
.error-msg {
|
| 202 |
+
background: rgba(239, 68, 68, 0.1);
|
| 203 |
+
color: #ef4444;
|
| 204 |
+
padding: 1rem;
|
| 205 |
+
border-radius: var(--radius-md);
|
| 206 |
+
margin: 1rem 0;
|
| 207 |
+
}
|
| 208 |
+
</style>
|
| 209 |
+
{% endblock %}
|
templates/clustering.html
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% extends "layout.html" %}
|
| 2 |
+
|
| 3 |
+
{% block title %}Data Clusters - Quantum AI{% endblock %}
|
| 4 |
+
|
| 5 |
+
{% block page_header %}
|
| 6 |
+
<h1>Data Clusters</h1>
|
| 7 |
+
<p>Unsupervised grouping of multivariate datasets using K-Means.</p>
|
| 8 |
+
{% endblock %}
|
| 9 |
+
|
| 10 |
+
{% block content %}
|
| 11 |
+
<div class="service-panel glass-card">
|
| 12 |
+
<form action="/clustering" method="post" enctype="multipart/form-data" class="quantum-form">
|
| 13 |
+
<div class="upload-zone" id="dropZone">
|
| 14 |
+
<i class="fas fa-file-csv upload-icon"></i>
|
| 15 |
+
<div class="upload-text">
|
| 16 |
+
<strong>Upload Data Structure</strong>
|
| 17 |
+
<span>Supports .CSV or .XLSX datasets</span>
|
| 18 |
+
</div>
|
| 19 |
+
<input type="file" name="file" accept=".csv, .xlsx" required onchange="handleFile(this)">
|
| 20 |
+
</div>
|
| 21 |
+
|
| 22 |
+
<div class="cluster-settings">
|
| 23 |
+
<div class="input-group">
|
| 24 |
+
<label>Cluster Centroids (K)</label>
|
| 25 |
+
<input type="number" name="clusters" value="3" min="2" max="10" required>
|
| 26 |
+
</div>
|
| 27 |
+
</div>
|
| 28 |
+
|
| 29 |
+
<button type="submit" class="btn-quantum full-width">
|
| 30 |
+
<i class="fas fa-chart-pie"></i> Map Clusters
|
| 31 |
+
</button>
|
| 32 |
+
</form>
|
| 33 |
+
|
| 34 |
+
{% if error %}
|
| 35 |
+
<div class="error-msg animate-fade-in">
|
| 36 |
+
<i class="fas fa-exclamation-triangle"></i> {{ error }}
|
| 37 |
+
</div>
|
| 38 |
+
{% endif %}
|
| 39 |
+
|
| 40 |
+
{% if plot_url %}
|
| 41 |
+
<div class="result-container animate-fade-in">
|
| 42 |
+
<div class="result-header">Clustering Schema Visualization</div>
|
| 43 |
+
|
| 44 |
+
<div class="visualization-box">
|
| 45 |
+
<img src="data:image/png;base64,{{ plot_url }}" alt="Clustering Plot">
|
| 46 |
+
</div>
|
| 47 |
+
|
| 48 |
+
{% if cluster_info %}
|
| 49 |
+
<div class="cluster-stats">
|
| 50 |
+
<label>Population Distribution</label>
|
| 51 |
+
<div class="stats-grid">
|
| 52 |
+
{% for cluster, count in cluster_info.items() %}
|
| 53 |
+
<div class="stat-card">
|
| 54 |
+
<span class="cluster-id">Cluster {{ cluster }}</span>
|
| 55 |
+
<span class="count-value">{{ count }} Entities</span>
|
| 56 |
+
</div>
|
| 57 |
+
{% endfor %}
|
| 58 |
+
</div>
|
| 59 |
+
</div>
|
| 60 |
+
{% endif %}
|
| 61 |
+
</div>
|
| 62 |
+
{% endif %}
|
| 63 |
+
</div>
|
| 64 |
+
|
| 65 |
+
<style>
|
| 66 |
+
.service-panel {
|
| 67 |
+
max-width: 900px;
|
| 68 |
+
margin: 0 auto;
|
| 69 |
+
}
|
| 70 |
+
|
| 71 |
+
.upload-zone {
|
| 72 |
+
border: 2px dashed var(--glass-border);
|
| 73 |
+
border-radius: var(--radius-lg);
|
| 74 |
+
padding: 3rem;
|
| 75 |
+
text-align: center;
|
| 76 |
+
position: relative;
|
| 77 |
+
background: rgba(0, 0, 0, 0.1);
|
| 78 |
+
}
|
| 79 |
+
|
| 80 |
+
.upload-zone input {
|
| 81 |
+
position: absolute;
|
| 82 |
+
inset: 0;
|
| 83 |
+
opacity: 0;
|
| 84 |
+
cursor: pointer;
|
| 85 |
+
}
|
| 86 |
+
|
| 87 |
+
.upload-icon {
|
| 88 |
+
font-size: 3rem;
|
| 89 |
+
color: var(--accent-blue);
|
| 90 |
+
margin-bottom: 1rem;
|
| 91 |
+
}
|
| 92 |
+
|
| 93 |
+
.cluster-settings {
|
| 94 |
+
margin-top: 1.5rem;
|
| 95 |
+
}
|
| 96 |
+
|
| 97 |
+
.input-group label {
|
| 98 |
+
font-size: 0.85rem;
|
| 99 |
+
font-weight: 700;
|
| 100 |
+
color: var(--text-secondary);
|
| 101 |
+
text-transform: uppercase;
|
| 102 |
+
margin-bottom: 0.5rem;
|
| 103 |
+
display: block;
|
| 104 |
+
}
|
| 105 |
+
|
| 106 |
+
input[type="number"] {
|
| 107 |
+
background: rgba(0, 0, 0, 0.2);
|
| 108 |
+
border: 1px solid var(--glass-border);
|
| 109 |
+
border-radius: var(--radius-md);
|
| 110 |
+
padding: 0.75rem 1rem;
|
| 111 |
+
color: var(--text-primary);
|
| 112 |
+
font-size: 1.1rem;
|
| 113 |
+
width: 100px;
|
| 114 |
+
}
|
| 115 |
+
|
| 116 |
+
.visualization-box {
|
| 117 |
+
background: white;
|
| 118 |
+
/* Contrast for charts */
|
| 119 |
+
padding: 1.5rem;
|
| 120 |
+
border-radius: var(--radius-lg);
|
| 121 |
+
margin-top: 1rem;
|
| 122 |
+
display: flex;
|
| 123 |
+
justify-content: center;
|
| 124 |
+
overflow: hidden;
|
| 125 |
+
}
|
| 126 |
+
|
| 127 |
+
.visualization-box img {
|
| 128 |
+
max-width: 100%;
|
| 129 |
+
height: auto;
|
| 130 |
+
}
|
| 131 |
+
|
| 132 |
+
.cluster-stats {
|
| 133 |
+
margin-top: 2rem;
|
| 134 |
+
}
|
| 135 |
+
|
| 136 |
+
.cluster-stats label {
|
| 137 |
+
color: var(--text-secondary);
|
| 138 |
+
font-weight: 600;
|
| 139 |
+
display: block;
|
| 140 |
+
margin-bottom: 1rem;
|
| 141 |
+
}
|
| 142 |
+
|
| 143 |
+
.stats-grid {
|
| 144 |
+
display: grid;
|
| 145 |
+
grid-template-columns: repeat(auto-fill, minmax(180px, 1fr));
|
| 146 |
+
gap: 1rem;
|
| 147 |
+
}
|
| 148 |
+
|
| 149 |
+
.stat-card {
|
| 150 |
+
background: rgba(255, 255, 255, 0.03);
|
| 151 |
+
padding: 1.25rem;
|
| 152 |
+
border-radius: var(--radius-md);
|
| 153 |
+
display: flex;
|
| 154 |
+
flex-direction: column;
|
| 155 |
+
border-left: 3px solid var(--accent-purple);
|
| 156 |
+
}
|
| 157 |
+
|
| 158 |
+
.cluster-id {
|
| 159 |
+
font-size: 0.8rem;
|
| 160 |
+
font-weight: 700;
|
| 161 |
+
color: var(--accent-purple);
|
| 162 |
+
text-transform: uppercase;
|
| 163 |
+
margin-bottom: 4px;
|
| 164 |
+
}
|
| 165 |
+
|
| 166 |
+
.count-value {
|
| 167 |
+
font-size: 1.2rem;
|
| 168 |
+
font-weight: 800;
|
| 169 |
+
}
|
| 170 |
+
|
| 171 |
+
.error-msg {
|
| 172 |
+
background: rgba(239, 68, 68, 0.1);
|
| 173 |
+
color: #ef4444;
|
| 174 |
+
padding: 1rem;
|
| 175 |
+
border-radius: var(--radius-md);
|
| 176 |
+
margin: 1rem 0;
|
| 177 |
+
}
|
| 178 |
+
</style>
|
| 179 |
+
|
| 180 |
+
<script>
|
| 181 |
+
function handleFile(input) {
|
| 182 |
+
// Simple visual feedback could go here
|
| 183 |
+
}
|
| 184 |
+
</script>
|
| 185 |
+
{% endblock %}
|
templates/dbscan.html
ADDED
|
@@ -0,0 +1,168 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% extends "layout.html" %}
|
| 2 |
+
|
| 3 |
+
{% block title %}DBSCAN Clusters - Quantum AI{% endblock %}
|
| 4 |
+
|
| 5 |
+
{% block page_header %}
|
| 6 |
+
<h1>DBSCAN Clusters</h1>
|
| 7 |
+
<p>Density-Based Spatial Clustering of Applications with Noise (DBSCAN).</p>
|
| 8 |
+
{% endblock %}
|
| 9 |
+
|
| 10 |
+
{% block content %}
|
| 11 |
+
<div class="service-panel glass-card">
|
| 12 |
+
<form action="/dbscan" method="post" enctype="multipart/form-data" class="quantum-form">
|
| 13 |
+
<div class="upload-zone" id="dropZone">
|
| 14 |
+
<i class="fas fa-braille upload-icon"></i>
|
| 15 |
+
<div class="upload-text">
|
| 16 |
+
<strong>Upload Data Structure</strong>
|
| 17 |
+
<span>Supports .CSV or .XLSX datasets</span>
|
| 18 |
+
</div>
|
| 19 |
+
<input type="file" name="file" accept=".csv, .xlsx" required>
|
| 20 |
+
</div>
|
| 21 |
+
|
| 22 |
+
<div class="settings-grid">
|
| 23 |
+
<div class="input-group">
|
| 24 |
+
<label>Epsilon (eps)</label>
|
| 25 |
+
<input type="number" name="eps" value="0.5" min="0.01" max="10" step="0.01" required>
|
| 26 |
+
</div>
|
| 27 |
+
<div class="input-group">
|
| 28 |
+
<label>Min Samples</label>
|
| 29 |
+
<input type="number" name="min_samples" value="5" min="1" max="100" required>
|
| 30 |
+
</div>
|
| 31 |
+
</div>
|
| 32 |
+
|
| 33 |
+
<button type="submit" class="btn-quantum full-width">
|
| 34 |
+
<i class="fas fa-project-diagram"></i> Analyze Density
|
| 35 |
+
</button>
|
| 36 |
+
</form>
|
| 37 |
+
|
| 38 |
+
{% if error %}
|
| 39 |
+
<div class="error-msg animate-fade-in">
|
| 40 |
+
<i class="fas fa-exclamation-triangle"></i> {{ error }}
|
| 41 |
+
</div>
|
| 42 |
+
{% endif %}
|
| 43 |
+
|
| 44 |
+
{% if plot_url %}
|
| 45 |
+
<div class="result-container animate-fade-in">
|
| 46 |
+
<div class="result-header">Density Map Visualization</div>
|
| 47 |
+
|
| 48 |
+
<div class="visualization-box">
|
| 49 |
+
<img src="data:image/png;base64,{{ plot_url }}" alt="DBSCAN Plot">
|
| 50 |
+
</div>
|
| 51 |
+
|
| 52 |
+
{% if cluster_info %}
|
| 53 |
+
<div class="cluster-stats">
|
| 54 |
+
<label>Density Distribution (-1 denotes noise)</label>
|
| 55 |
+
<div class="stats-grid">
|
| 56 |
+
{% for cluster, count in cluster_info.items() %}
|
| 57 |
+
<div class="stat-card {% if cluster|int == -1 %}noise-card{% endif %}">
|
| 58 |
+
<span class="cluster-id">{% if cluster|int == -1 %}Noise (Outliers){% else %}Cluster {{ cluster }}{%
|
| 59 |
+
endif %}</span>
|
| 60 |
+
<span class="count-value">{{ count }} Entities</span>
|
| 61 |
+
</div>
|
| 62 |
+
{% endfor %}
|
| 63 |
+
</div>
|
| 64 |
+
</div>
|
| 65 |
+
{% endif %}
|
| 66 |
+
</div>
|
| 67 |
+
{% endif %}
|
| 68 |
+
</div>
|
| 69 |
+
|
| 70 |
+
<style>
|
| 71 |
+
.service-panel {
|
| 72 |
+
max-width: 900px;
|
| 73 |
+
margin: 0 auto;
|
| 74 |
+
}
|
| 75 |
+
|
| 76 |
+
.settings-grid {
|
| 77 |
+
display: grid;
|
| 78 |
+
grid-template-columns: 1fr 1fr;
|
| 79 |
+
gap: 1.5rem;
|
| 80 |
+
margin-top: 1.5rem;
|
| 81 |
+
}
|
| 82 |
+
|
| 83 |
+
.input-group label {
|
| 84 |
+
font-size: 0.85rem;
|
| 85 |
+
font-weight: 700;
|
| 86 |
+
color: var(--text-secondary);
|
| 87 |
+
text-transform: uppercase;
|
| 88 |
+
margin-bottom: 0.5rem;
|
| 89 |
+
display: block;
|
| 90 |
+
}
|
| 91 |
+
|
| 92 |
+
input[type="number"] {
|
| 93 |
+
background: rgba(0, 0, 0, 0.2);
|
| 94 |
+
border: 1px solid var(--glass-border);
|
| 95 |
+
border-radius: var(--radius-md);
|
| 96 |
+
padding: 0.75rem 1rem;
|
| 97 |
+
color: var(--text-primary);
|
| 98 |
+
font-size: 1.1rem;
|
| 99 |
+
width: 100%;
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
.visualization-box {
|
| 103 |
+
background: white;
|
| 104 |
+
padding: 1.5rem;
|
| 105 |
+
border-radius: var(--radius-lg);
|
| 106 |
+
margin-top: 1rem;
|
| 107 |
+
display: flex;
|
| 108 |
+
justify-content: center;
|
| 109 |
+
overflow: hidden;
|
| 110 |
+
}
|
| 111 |
+
|
| 112 |
+
.visualization-box img {
|
| 113 |
+
max-width: 100%;
|
| 114 |
+
height: auto;
|
| 115 |
+
}
|
| 116 |
+
|
| 117 |
+
.cluster-stats {
|
| 118 |
+
margin-top: 2rem;
|
| 119 |
+
}
|
| 120 |
+
|
| 121 |
+
.cluster-stats label {
|
| 122 |
+
color: var(--text-secondary);
|
| 123 |
+
font-weight: 600;
|
| 124 |
+
display: block;
|
| 125 |
+
margin-bottom: 1rem;
|
| 126 |
+
}
|
| 127 |
+
|
| 128 |
+
.stats-grid {
|
| 129 |
+
display: grid;
|
| 130 |
+
grid-template-columns: repeat(auto-fill, minmax(180px, 1fr));
|
| 131 |
+
gap: 1rem;
|
| 132 |
+
}
|
| 133 |
+
|
| 134 |
+
.stat-card {
|
| 135 |
+
background: rgba(255, 255, 255, 0.03);
|
| 136 |
+
padding: 1.25rem;
|
| 137 |
+
border-radius: var(--radius-md);
|
| 138 |
+
display: flex;
|
| 139 |
+
flex-direction: column;
|
| 140 |
+
border-left: 3px solid var(--accent-blue);
|
| 141 |
+
}
|
| 142 |
+
|
| 143 |
+
.stat-card.noise-card {
|
| 144 |
+
border-left-color: #ef4444;
|
| 145 |
+
}
|
| 146 |
+
|
| 147 |
+
.cluster-id {
|
| 148 |
+
font-size: 0.8rem;
|
| 149 |
+
font-weight: 700;
|
| 150 |
+
color: var(--text-secondary);
|
| 151 |
+
text-transform: uppercase;
|
| 152 |
+
margin-bottom: 4px;
|
| 153 |
+
}
|
| 154 |
+
|
| 155 |
+
.count-value {
|
| 156 |
+
font-size: 1.2rem;
|
| 157 |
+
font-weight: 800;
|
| 158 |
+
}
|
| 159 |
+
|
| 160 |
+
.error-msg {
|
| 161 |
+
background: rgba(239, 68, 68, 0.1);
|
| 162 |
+
color: #ef4444;
|
| 163 |
+
padding: 1rem;
|
| 164 |
+
border-radius: var(--radius-md);
|
| 165 |
+
margin: 1rem 0;
|
| 166 |
+
}
|
| 167 |
+
</style>
|
| 168 |
+
{% endblock %}
|
templates/gender.html
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% extends "layout.html" %}
|
| 2 |
+
|
| 3 |
+
{% block title %}Gender Discovery - Quantum AI{% endblock %}
|
| 4 |
+
|
| 5 |
+
{% block page_header %}
|
| 6 |
+
<h1>Gender Discovery</h1>
|
| 7 |
+
<p>Upload a visual specimen for neural gender classification.</p>
|
| 8 |
+
{% endblock %}
|
| 9 |
+
|
| 10 |
+
{% block content %}
|
| 11 |
+
<div class="service-panel glass-card">
|
| 12 |
+
<form action="/gender" method="post" enctype="multipart/form-data" class="quantum-form">
|
| 13 |
+
<div class="upload-zone" id="dropZone">
|
| 14 |
+
<i class="fas fa-cloud-upload-alt upload-icon"></i>
|
| 15 |
+
<div class="upload-text">
|
| 16 |
+
<strong>Click to upload</strong> or drag and drop
|
| 17 |
+
<span>PNG, JPG or WEBP (max. 10MB)</span>
|
| 18 |
+
</div>
|
| 19 |
+
<input type="file" name="image" id="fileInput" accept="image/*" required onchange="handleFile(this)">
|
| 20 |
+
</div>
|
| 21 |
+
|
| 22 |
+
<div id="filePreview" class="file-preview"></div>
|
| 23 |
+
|
| 24 |
+
<button type="submit" class="btn-quantum full-width">
|
| 25 |
+
<i class="fas fa-microchip"></i> Run Discovery Engine
|
| 26 |
+
</button>
|
| 27 |
+
</form>
|
| 28 |
+
|
| 29 |
+
{% if result %}
|
| 30 |
+
<div class="result-container animate-fade-in">
|
| 31 |
+
<div class="result-header">Engine Output</div>
|
| 32 |
+
<div class="result-display">
|
| 33 |
+
<span class="label">Detected Gender</span>
|
| 34 |
+
<span class="value">{{ result }}</span>
|
| 35 |
+
</div>
|
| 36 |
+
<div class="result-note">
|
| 37 |
+
<i class="fas fa-info-circle"></i> The vision model identifies facial features to determine classification.
|
| 38 |
+
</div>
|
| 39 |
+
</div>
|
| 40 |
+
{% endif %}
|
| 41 |
+
</div>
|
| 42 |
+
|
| 43 |
+
<style>
|
| 44 |
+
.service-panel {
|
| 45 |
+
max-width: 700px;
|
| 46 |
+
margin: 0 auto;
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
.quantum-form {
|
| 50 |
+
display: flex;
|
| 51 |
+
flex-direction: column;
|
| 52 |
+
gap: 1.5rem;
|
| 53 |
+
}
|
| 54 |
+
|
| 55 |
+
.upload-zone {
|
| 56 |
+
border: 2px dashed var(--glass-border);
|
| 57 |
+
border-radius: var(--radius-lg);
|
| 58 |
+
padding: 3rem 2rem;
|
| 59 |
+
text-align: center;
|
| 60 |
+
cursor: pointer;
|
| 61 |
+
transition: var(--transition);
|
| 62 |
+
position: relative;
|
| 63 |
+
}
|
| 64 |
+
|
| 65 |
+
.upload-zone:hover,
|
| 66 |
+
.upload-zone.dragover {
|
| 67 |
+
border-color: var(--accent-blue);
|
| 68 |
+
background: rgba(0, 210, 255, 0.05);
|
| 69 |
+
}
|
| 70 |
+
|
| 71 |
+
.upload-icon {
|
| 72 |
+
font-size: 3rem;
|
| 73 |
+
color: var(--accent-blue);
|
| 74 |
+
margin-bottom: 1rem;
|
| 75 |
+
}
|
| 76 |
+
|
| 77 |
+
.upload-text {
|
| 78 |
+
color: var(--text-secondary);
|
| 79 |
+
display: flex;
|
| 80 |
+
flex-direction: column;
|
| 81 |
+
gap: 4px;
|
| 82 |
+
}
|
| 83 |
+
|
| 84 |
+
.upload-text strong {
|
| 85 |
+
color: var(--text-primary);
|
| 86 |
+
}
|
| 87 |
+
|
| 88 |
+
#fileInput {
|
| 89 |
+
position: absolute;
|
| 90 |
+
inset: 0;
|
| 91 |
+
opacity: 0;
|
| 92 |
+
cursor: pointer;
|
| 93 |
+
}
|
| 94 |
+
|
| 95 |
+
.full-width {
|
| 96 |
+
width: 100%;
|
| 97 |
+
justify-content: center;
|
| 98 |
+
padding: 1rem;
|
| 99 |
+
font-size: 1.1rem;
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
.file-preview {
|
| 103 |
+
margin-top: 0.5rem;
|
| 104 |
+
color: var(--accent-blue);
|
| 105 |
+
font-weight: 500;
|
| 106 |
+
text-align: center;
|
| 107 |
+
}
|
| 108 |
+
|
| 109 |
+
.result-container {
|
| 110 |
+
margin-top: 2rem;
|
| 111 |
+
padding-top: 2rem;
|
| 112 |
+
border-top: 1px solid var(--glass-border);
|
| 113 |
+
}
|
| 114 |
+
|
| 115 |
+
.result-header {
|
| 116 |
+
text-transform: uppercase;
|
| 117 |
+
letter-spacing: 2px;
|
| 118 |
+
font-size: 0.75rem;
|
| 119 |
+
font-weight: 800;
|
| 120 |
+
color: var(--accent-purple);
|
| 121 |
+
margin-bottom: 1rem;
|
| 122 |
+
}
|
| 123 |
+
|
| 124 |
+
.result-display {
|
| 125 |
+
background: rgba(255, 255, 255, 0.03);
|
| 126 |
+
padding: 1.5rem;
|
| 127 |
+
border-radius: var(--radius-md);
|
| 128 |
+
display: flex;
|
| 129 |
+
justify-content: space-between;
|
| 130 |
+
align-items: center;
|
| 131 |
+
}
|
| 132 |
+
|
| 133 |
+
.result-display .label {
|
| 134 |
+
font-size: 1.1rem;
|
| 135 |
+
color: var(--text-secondary);
|
| 136 |
+
}
|
| 137 |
+
|
| 138 |
+
.result-display .value {
|
| 139 |
+
font-size: 2rem;
|
| 140 |
+
font-weight: 800;
|
| 141 |
+
color: var(--text-primary);
|
| 142 |
+
text-transform: capitalize;
|
| 143 |
+
}
|
| 144 |
+
|
| 145 |
+
.result-note {
|
| 146 |
+
margin-top: 1rem;
|
| 147 |
+
font-size: 0.85rem;
|
| 148 |
+
color: var(--text-secondary);
|
| 149 |
+
font-style: italic;
|
| 150 |
+
}
|
| 151 |
+
</style>
|
| 152 |
+
|
| 153 |
+
<script>
|
| 154 |
+
function handleFile(input) {
|
| 155 |
+
const preview = document.getElementById('filePreview');
|
| 156 |
+
if (input.files && input.files[0]) {
|
| 157 |
+
preview.innerHTML = `<i class="fas fa-file-image"></i> Selected: ${input.files[0].name}`;
|
| 158 |
+
}
|
| 159 |
+
}
|
| 160 |
+
|
| 161 |
+
const dropZone = document.getElementById('dropZone');
|
| 162 |
+
['dragenter', 'dragover'].forEach(name => {
|
| 163 |
+
dropZone.addEventListener(name, (e) => {
|
| 164 |
+
e.preventDefault();
|
| 165 |
+
dropZone.classList.add('dragover');
|
| 166 |
+
});
|
| 167 |
+
});
|
| 168 |
+
['dragleave', 'drop'].forEach(name => {
|
| 169 |
+
dropZone.addEventListener(name, (e) => {
|
| 170 |
+
e.preventDefault();
|
| 171 |
+
dropZone.classList.remove('dragover');
|
| 172 |
+
});
|
| 173 |
+
});
|
| 174 |
+
</script>
|
| 175 |
+
{% endblock %}
|
templates/generate.html
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html>
|
| 2 |
+
<html lang="en">
|
| 3 |
+
|
| 4 |
+
<head>
|
| 5 |
+
<meta charset="UTF-8">
|
| 6 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
| 7 |
+
<title>Muse Text Gen - AI Magic Hub</title>
|
| 8 |
+
<link rel="stylesheet" href="{{ url_for('static', filename='css/style.css') }}">
|
| 9 |
+
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
|
| 10 |
+
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700;900&display=swap" rel="stylesheet">
|
| 11 |
+
</head>
|
| 12 |
+
|
| 13 |
+
<body>
|
| 14 |
+
<div id="loading-overlay">
|
| 15 |
+
<div class="loader"></div>
|
| 16 |
+
<p>Awakening the Muse...</p>
|
| 17 |
+
</div>
|
| 18 |
+
|
| 19 |
+
<div class="container">
|
| 20 |
+
<a href="/" class="back-link"><i class="fas fa-arrow-left"></i> Back to Dashboard</a>
|
| 21 |
+
<header>
|
| 22 |
+
<h1>Muse Text Gen</h1>
|
| 23 |
+
<p class="subtitle">Powered by GPT-2. Enter a prompt and watch the AI weave a narrative beyond imagination.
|
| 24 |
+
</p>
|
| 25 |
+
</header>
|
| 26 |
+
|
| 27 |
+
<div class="form-card">
|
| 28 |
+
<form id="generate-form" method="POST">
|
| 29 |
+
<div class="form-group">
|
| 30 |
+
<label for="prompt">Story Prompt</label>
|
| 31 |
+
<textarea name="prompt" id="prompt" rows="4" required
|
| 32 |
+
placeholder="Once upon a time in a digital realm...">{{ prompt if prompt else '' }}</textarea>
|
| 33 |
+
</div>
|
| 34 |
+
|
| 35 |
+
<div class="slider-container">
|
| 36 |
+
<label for="temperature">Creativity Level: <span id="temp-val">{{ temperature }}</span></label>
|
| 37 |
+
<input type="range" name="temperature" id="temperature" min="0.1" max="1.5" step="0.1"
|
| 38 |
+
value="{{ temperature }}">
|
| 39 |
+
<p style="font-size: 0.75rem; color: var(--text-dim); margin-top: 0.5rem;">Higher values produce
|
| 40 |
+
more creative but less predictable text.</p>
|
| 41 |
+
</div>
|
| 42 |
+
|
| 43 |
+
<button type="submit"><i class="fas fa-sparkles"></i> Conjure Story</button>
|
| 44 |
+
</form>
|
| 45 |
+
|
| 46 |
+
{% if result %}
|
| 47 |
+
<div class="result-box">
|
| 48 |
+
<div class="floating-actions">
|
| 49 |
+
<button class="copy-btn" onclick="copyToClipboard('generated-text')">
|
| 50 |
+
<i class="fas fa-copy"></i> Copy
|
| 51 |
+
</button>
|
| 52 |
+
</div>
|
| 53 |
+
<p style="font-size: 0.875rem; color: var(--text-dim); margin-bottom: 1rem;">Generated Text:</p>
|
| 54 |
+
<div id="generated-text" style="line-height: 1.8; color: var(--text-main);">
|
| 55 |
+
{{ result }}
|
| 56 |
+
</div>
|
| 57 |
+
</div>
|
| 58 |
+
{% endif %}
|
| 59 |
+
</div>
|
| 60 |
+
</div>
|
| 61 |
+
|
| 62 |
+
<script>
    // Show the loading overlay while GPT-2 generates.
    // BUG FIX: the form's id is "generate-form" (see the <form> above);
    // the original looked up "gen-form", so this handler was never attached
    // and the overlay never appeared.
    document.getElementById('generate-form').onsubmit = () => {
        document.getElementById('loading-overlay').style.display = 'flex';
    };

    // Keep the "Creativity Level" readout (#temp-val) in sync with the
    // slider — it was rendered once server-side but never updated on input.
    const tempSlider = document.getElementById('temperature');
    tempSlider.addEventListener('input', () => {
        document.getElementById('temp-val').textContent = tempSlider.value;
    });
</script>
|
| 67 |
+
</body>
|
| 68 |
+
|
| 69 |
+
</html>
|
templates/image.html
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html>
|
| 2 |
+
<html lang="en">
|
| 3 |
+
|
| 4 |
+
<head>
|
| 5 |
+
<meta charset="UTF-8">
|
| 6 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
| 7 |
+
<title>Gender Vision - AI Magic Hub</title>
|
| 8 |
+
<link rel="stylesheet" href="{{ url_for('static', filename='css/style.css') }}">
|
| 9 |
+
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
|
| 10 |
+
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700;900&display=swap" rel="stylesheet">
|
| 11 |
+
</head>
|
| 12 |
+
|
| 13 |
+
<body>
|
| 14 |
+
<div id="loading-overlay">
|
| 15 |
+
<div class="loader"></div>
|
| 16 |
+
<p>Analyzing Image...</p>
|
| 17 |
+
</div>
|
| 18 |
+
|
| 19 |
+
<div class="container">
|
| 20 |
+
<a href="/" class="back-link"><i class="fas fa-arrow-left"></i> Back to Dashboard</a>
|
| 21 |
+
<header>
|
| 22 |
+
<h1>Gender Vision</h1>
|
| 23 |
+
<p class="subtitle">Upload a portrait to identify gender using our advanced CNN model.</p>
|
| 24 |
+
</header>
|
| 25 |
+
|
| 26 |
+
<div class="form-card">
|
| 27 |
+
<div id="preview-container"
|
| 28 |
+
style="display: none; width: 100%; height: 300px; border-radius: 1.5rem; overflow: hidden; margin-bottom: 2rem; border: 2px dashed var(--glass-border);">
|
| 29 |
+
<img id="preview-img" src="" alt="Preview" style="width: 100%; height: 100%; object-fit: cover;">
|
| 30 |
+
</div>
|
| 31 |
+
|
| 32 |
+
<form id="upload-form" method="POST" enctype="multipart/form-data">
|
| 33 |
+
<div class="form-group">
|
| 34 |
+
<label for="file">Select Image</label>
|
| 35 |
+
<input type="file" name="file" id="file" required accept="image/*">
|
| 36 |
+
</div>
|
| 37 |
+
<button type="submit"><i class="fas fa-search"></i> Start Classification</button>
|
| 38 |
+
</form>
|
| 39 |
+
|
| 40 |
+
{% if prediction %}
|
| 41 |
+
<div class="result-box">
|
| 42 |
+
<p>Result:</p>
|
| 43 |
+
<h2
|
| 44 |
+
style="font-size: 2.5rem; margin-top: 0.5rem; background: linear-gradient(135deg, #818cf8, #c084fc); -webkit-background-clip: text; background-clip: text; -webkit-text-fill-color: transparent;">
|
| 45 |
+
{{ prediction }}
|
| 46 |
+
</h2>
|
| 47 |
+
|
| 48 |
+
{% if "Error" not in prediction %}
|
| 49 |
+
<div class="confidence-container">
|
| 50 |
+
<p style="font-size: 0.875rem; color: var(--text-dim);">Confidence Score:</p>
|
| 51 |
+
<div class="confidence-bar">
|
| 52 |
+
<div class="confidence-fill"
|
| 53 |
+
style="width: {{ (probability * 100) if prediction == 'Male' else ((1 - probability) * 100) }}%;">
|
| 54 |
+
</div>
|
| 55 |
+
</div>
|
| 56 |
+
<p style="font-size: 0.75rem; color: var(--text-dim); text-align: right;">
|
| 57 |
+
{{ ((probability * 100) if prediction == 'Male' else ((1 - probability) * 100))|round(2) }}%
|
| 58 |
+
</p>
|
| 59 |
+
</div>
|
| 60 |
+
{% endif %}
|
| 61 |
+
|
| 62 |
+
{% if "Error" in prediction %}
|
| 63 |
+
<p style="color: #ef4444; margin-top: 1rem;">{{ prediction }}</p>
|
| 64 |
+
{% endif %}
|
| 65 |
+
</div>
|
| 66 |
+
{% endif %}
|
| 67 |
+
</div>
|
| 68 |
+
</div>
|
| 69 |
+
|
| 70 |
+
<script>
|
| 71 |
+
const fileInput = document.getElementById('file');
|
| 72 |
+
const previewContainer = document.getElementById('preview-container');
|
| 73 |
+
const previewImg = document.getElementById('preview-img');
|
| 74 |
+
const form = document.getElementById('upload-form');
|
| 75 |
+
const loadingOverlay = document.getElementById('loading-overlay');
|
| 76 |
+
|
| 77 |
+
fileInput.addEventListener('change', function () {
|
| 78 |
+
const file = this.files[0];
|
| 79 |
+
if (file) {
|
| 80 |
+
const reader = new FileReader();
|
| 81 |
+
reader.onload = function (e) {
|
| 82 |
+
previewImg.src = e.target.result;
|
| 83 |
+
previewContainer.style.display = 'block';
|
| 84 |
+
}
|
| 85 |
+
reader.readAsDataURL(file);
|
| 86 |
+
}
|
| 87 |
+
});
|
| 88 |
+
|
| 89 |
+
form.addEventListener('submit', () => {
|
| 90 |
+
loadingOverlay.style.display = 'flex';
|
| 91 |
+
});
|
| 92 |
+
</script>
|
| 93 |
+
</body>
|
| 94 |
+
|
| 95 |
+
</html>
|
templates/index.html
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% extends "layout.html" %}
|
| 2 |
+
|
| 3 |
+
{% block title %}Dashboard - Quantum AI Hub{% endblock %}
|
| 4 |
+
|
| 5 |
+
{% block page_header %}
|
| 6 |
+
<h1>Quantum Analytics</h1>
|
| 7 |
+
<p>Select a specialized AI engine to begin processing your data.</p>
|
| 8 |
+
{% endblock %}
|
| 9 |
+
|
| 10 |
+
{% block content %}
|
| 11 |
+
<div class="services-grid">
|
| 12 |
+
<a href="/gender" class="service-card glass-card">
|
| 13 |
+
<div class="card-icon"><i class="fas fa-user-circle"></i></div>
|
| 14 |
+
<h3>Gender Discovery</h3>
|
| 15 |
+
<p>Vision Transformer model trained for high-precision gender classification.</p>
|
| 16 |
+
<div class="card-footer">
|
| 17 |
+
<span class="btn-quantum-sm">Launch Engine</span>
|
| 18 |
+
</div>
|
| 19 |
+
</a>
|
| 20 |
+
|
| 21 |
+
<a href="/textgen" class="service-card glass-card">
|
| 22 |
+
<div class="card-icon"><i class="fas fa-pen-nib"></i></div>
|
| 23 |
+
<h3>Text Synthesis</h3>
|
| 24 |
+
<p>Creative language generation powered by GPT-2 architecture.</p>
|
| 25 |
+
<div class="card-footer">
|
| 26 |
+
<span class="btn-quantum-sm">Launch Engine</span>
|
| 27 |
+
</div>
|
| 28 |
+
</a>
|
| 29 |
+
|
| 30 |
+
<a href="/translate" class="service-card glass-card">
|
| 31 |
+
<div class="card-icon"><i class="fas fa-language"></i></div>
|
| 32 |
+
<h3>Neural Translate</h3>
|
| 33 |
+
<p>Advanced English-to-Urdu translation using sequence models.</p>
|
| 34 |
+
<div class="card-footer">
|
| 35 |
+
<span class="btn-quantum-sm">Launch Engine</span>
|
| 36 |
+
</div>
|
| 37 |
+
</a>
|
| 38 |
+
|
| 39 |
+
<a href="/sentiment" class="service-card glass-card">
|
| 40 |
+
<div class="card-icon"><i class="fas fa-smile-beam"></i></div>
|
| 41 |
+
<h3>Empathy Engine</h3>
|
| 42 |
+
<p>Analyze emotional valence in both text and vocal inputs.</p>
|
| 43 |
+
<div class="card-footer">
|
| 44 |
+
<span class="btn-quantum-sm">Launch Engine</span>
|
| 45 |
+
</div>
|
| 46 |
+
</a>
|
| 47 |
+
|
| 48 |
+
<a href="/qa" class="service-card glass-card">
|
| 49 |
+
<div class="card-icon"><i class="fas fa-brain"></i></div>
|
| 50 |
+
<h3>Cognitive QA</h3>
|
| 51 |
+
<p>Extract precise knowledge from context with DistilBERT.</p>
|
| 52 |
+
<div class="card-footer">
|
| 53 |
+
<span class="btn-quantum-sm">Launch Engine</span>
|
| 54 |
+
</div>
|
| 55 |
+
</a>
|
| 56 |
+
|
| 57 |
+
<a href="/zsl" class="service-card glass-card">
|
| 58 |
+
<div class="card-icon"><i class="fas fa-bullseye"></i></div>
|
| 59 |
+
<h3>Zero-Shot Lab</h3>
|
| 60 |
+
<p>Powerful BART-based classification for any unseen categories.</p>
|
| 61 |
+
<div class="card-footer">
|
| 62 |
+
<span class="btn-quantum-sm">Launch Engine</span>
|
| 63 |
+
</div>
|
| 64 |
+
</a>
|
| 65 |
+
|
| 66 |
+
<a href="/clustering" class="service-card glass-card">
|
| 67 |
+
<div class="card-icon"><i class="fas fa-chart-pie"></i></div>
|
| 68 |
+
<h3>Data Clusters</h3>
|
| 69 |
+
<p>Automated pattern discovery using K-Means clustering.</p>
|
| 70 |
+
<div class="card-footer">
|
| 71 |
+
<span class="btn-quantum-sm">Launch Engine</span>
|
| 72 |
+
</div>
|
| 73 |
+
</a>
|
| 74 |
+
|
| 75 |
+
<a href="/dbscan" class="service-card glass-card">
|
| 76 |
+
<div class="card-icon"><i class="fas fa-braille"></i></div>
|
| 77 |
+
<h3>DBSCAN Lab</h3>
|
| 78 |
+
<p>Density-based clustering to identify complex patterns and outliers.</p>
|
| 79 |
+
<div class="card-footer">
|
| 80 |
+
<span class="btn-quantum-sm">Launch Engine</span>
|
| 81 |
+
</div>
|
| 82 |
+
</a>
|
| 83 |
+
|
| 84 |
+
<a href="/apriori" class="service-card glass-card">
|
| 85 |
+
<div class="card-icon"><i class="fas fa-shopping-cart"></i></div>
|
| 86 |
+
<h3>Market Analytics</h3>
|
| 87 |
+
<p>Generate association rules from transactional data with A-priori.</p>
|
| 88 |
+
<div class="card-footer">
|
| 89 |
+
<span class="btn-quantum-sm">Launch Engine</span>
|
| 90 |
+
</div>
|
| 91 |
+
</a>
|
| 92 |
+
</div>
|
| 93 |
+
|
| 94 |
+
<style>
|
| 95 |
+
.services-grid {
|
| 96 |
+
display: grid;
|
| 97 |
+
grid-template-columns: repeat(auto-fill, minmax(300px, 1fr));
|
| 98 |
+
gap: 1.5rem;
|
| 99 |
+
}
|
| 100 |
+
|
| 101 |
+
.service-card {
|
| 102 |
+
display: flex;
|
| 103 |
+
flex-direction: column;
|
| 104 |
+
gap: 1rem;
|
| 105 |
+
text-decoration: none;
|
| 106 |
+
color: inherit;
|
| 107 |
+
}
|
| 108 |
+
|
| 109 |
+
.card-icon {
|
| 110 |
+
font-size: 2.5rem;
|
| 111 |
+
background: var(--quantum-gradient);
|
| 112 |
+
-webkit-background-clip: text;
|
| 113 |
+
-webkit-text-fill-color: transparent;
|
| 114 |
+
margin-bottom: 0.5rem;
|
| 115 |
+
}
|
| 116 |
+
|
| 117 |
+
.service-card h3 {
|
| 118 |
+
font-size: 1.4rem;
|
| 119 |
+
font-weight: 700;
|
| 120 |
+
}
|
| 121 |
+
|
| 122 |
+
.service-card p {
|
| 123 |
+
color: var(--text-secondary);
|
| 124 |
+
font-size: 0.95rem;
|
| 125 |
+
line-height: 1.6;
|
| 126 |
+
flex-grow: 1;
|
| 127 |
+
}
|
| 128 |
+
|
| 129 |
+
.card-footer {
|
| 130 |
+
margin-top: 1rem;
|
| 131 |
+
display: flex;
|
| 132 |
+
justify-content: flex-end;
|
| 133 |
+
}
|
| 134 |
+
|
| 135 |
+
.btn-quantum-sm {
|
| 136 |
+
font-size: 0.8rem;
|
| 137 |
+
text-transform: uppercase;
|
| 138 |
+
letter-spacing: 1px;
|
| 139 |
+
font-weight: 700;
|
| 140 |
+
color: var(--accent-blue);
|
| 141 |
+
}
|
| 142 |
+
|
| 143 |
+
.service-card:hover .btn-quantum-sm {
|
| 144 |
+
text-decoration: underline;
|
| 145 |
+
}
|
| 146 |
+
</style>
|
| 147 |
+
{% endblock %}
|
templates/layout.html
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html>
|
| 2 |
+
<html lang="en">
|
| 3 |
+
|
| 4 |
+
<head>
|
| 5 |
+
<meta charset="UTF-8">
|
| 6 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
| 7 |
+
<title>{% block title %}AI Services Hub{% endblock %}</title>
|
| 8 |
+
<link href="https://fonts.googleapis.com/css2?family=Outfit:wght@300;400;500;600;700;800&display=swap"
|
| 9 |
+
rel="stylesheet">
|
| 10 |
+
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
|
| 11 |
+
<link rel="stylesheet" href="{{ url_for('static', filename='css/main.css') }}">
|
| 12 |
+
{% block extra_css %}{% endblock %}
|
| 13 |
+
</head>
|
| 14 |
+
|
| 15 |
+
<body>
|
| 16 |
+
<div class="dashboard-wrapper">
|
| 17 |
+
<aside class="sidebar">
|
| 18 |
+
<div class="brand">
|
| 19 |
+
<div class="brand-icon">AI</div>
|
| 20 |
+
<span class="brand-name">Quantum Hub</span>
|
| 21 |
+
</div>
|
| 22 |
+
|
| 23 |
+
<nav class="nav-menu">
|
| 24 |
+
<a href="/" class="nav-link {% if request.path == '/' %}active{% endif %}">
|
| 25 |
+
<i class="fas fa-th-large"></i>
|
| 26 |
+
<span class="nav-text">Dashboard</span>
|
| 27 |
+
</a>
|
| 28 |
+
<a href="/gender" class="nav-link {% if request.path == '/gender' %}active{% endif %}">
|
| 29 |
+
<i class="fas fa-user-circle"></i>
|
| 30 |
+
<span class="nav-text">Gender Discovery</span>
|
| 31 |
+
</a>
|
| 32 |
+
<a href="/textgen" class="nav-link {% if request.path == '/textgen' %}active{% endif %}">
|
| 33 |
+
<i class="fas fa-pen-nib"></i>
|
| 34 |
+
<span class="nav-text">Text Synthesis</span>
|
| 35 |
+
</a>
|
| 36 |
+
<a href="/translate" class="nav-link {% if request.path == '/translate' %}active{% endif %}">
|
| 37 |
+
<i class="fas fa-language"></i>
|
| 38 |
+
<span class="nav-text">Neural Translate</span>
|
| 39 |
+
</a>
|
| 40 |
+
<a href="/sentiment" class="nav-link {% if request.path == '/sentiment' %}active{% endif %}">
|
| 41 |
+
<i class="fas fa-smile-beam"></i>
|
| 42 |
+
<span class="nav-text">Empathy Engine</span>
|
| 43 |
+
</a>
|
| 44 |
+
<a href="/qa" class="nav-link {% if request.path == '/qa' %}active{% endif %}">
|
| 45 |
+
<i class="fas fa-brain"></i>
|
| 46 |
+
<span class="nav-text">Cognitive QA</span>
|
| 47 |
+
</a>
|
| 48 |
+
<a href="/zsl" class="nav-link {% if request.path == '/zsl' %}active{% endif %}">
|
| 49 |
+
<i class="fas fa-bullseye"></i>
|
| 50 |
+
<span class="nav-text">Zero-Shot Lab</span>
|
| 51 |
+
</a>
|
| 52 |
+
<a href="/clustering" class="nav-link {% if request.path == '/clustering' %}active{% endif %}">
|
| 53 |
+
<i class="fas fa-chart-pie"></i>
|
| 54 |
+
<span class="nav-text">Data Clusters</span>
|
| 55 |
+
</a>
|
| 56 |
+
<a href="/dbscan" class="nav-link {% if request.path == '/dbscan' %}active{% endif %}">
|
| 57 |
+
<i class="fas fa-braille"></i>
|
| 58 |
+
<span class="nav-text">DBSCAN Clusters</span>
|
| 59 |
+
</a>
|
| 60 |
+
<a href="/apriori" class="nav-link {% if request.path == '/apriori' %}active{% endif %}">
|
| 61 |
+
<i class="fas fa-shopping-cart"></i>
|
| 62 |
+
<span class="nav-text">Association Rules</span>
|
| 63 |
+
</a>
|
| 64 |
+
</nav>
|
| 65 |
+
</aside>
|
| 66 |
+
|
| 67 |
+
<main class="main-content">
|
| 68 |
+
<header class="top-bar">
|
| 69 |
+
<div class="page-title">
|
| 70 |
+
{% block page_header %}{% endblock %}
|
| 71 |
+
</div>
|
| 72 |
+
<div class="user-profile">
|
| 73 |
+
<div class="status-pill">
|
| 74 |
+
<span class="dot"></span> Online
|
| 75 |
+
</div>
|
| 76 |
+
</div>
|
| 77 |
+
</header>
|
| 78 |
+
|
| 79 |
+
<section class="content-area animate-fade-in">
|
| 80 |
+
{% block content %}{% endblock %}
|
| 81 |
+
</section>
|
| 82 |
+
</main>
|
| 83 |
+
</div>
|
| 84 |
+
|
| 85 |
+
<style>
|
| 86 |
+
.status-pill {
|
| 87 |
+
background: rgba(34, 197, 94, 0.1);
|
| 88 |
+
color: #22c55e;
|
| 89 |
+
padding: 6px 12px;
|
| 90 |
+
border-radius: 999px;
|
| 91 |
+
font-size: 0.85rem;
|
| 92 |
+
font-weight: 600;
|
| 93 |
+
display: flex;
|
| 94 |
+
align-items: center;
|
| 95 |
+
gap: 8px;
|
| 96 |
+
border: 1px solid rgba(34, 197, 94, 0.2);
|
| 97 |
+
}
|
| 98 |
+
|
| 99 |
+
.status-pill .dot {
|
| 100 |
+
width: 8px;
|
| 101 |
+
height: 8px;
|
| 102 |
+
background: #22c55e;
|
| 103 |
+
border-radius: 50%;
|
| 104 |
+
box-shadow: 0 0 10px #22c55e;
|
| 105 |
+
}
|
| 106 |
+
</style>
|
| 107 |
+
|
| 108 |
+
{% block extra_js %}{% endblock %}
|
| 109 |
+
</body>
|
| 110 |
+
|
| 111 |
+
</html>
|
templates/qa.html
ADDED
|
@@ -0,0 +1,262 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% extends "layout.html" %}
|
| 2 |
+
|
| 3 |
+
{% block title %}Cognitive QA - Quantum AI{% endblock %}
|
| 4 |
+
|
| 5 |
+
{% block page_header %}
|
| 6 |
+
<h1>Cognitive QA</h1>
|
| 7 |
+
<p>Knowledge extraction engine with vocal synthesis output.</p>
|
| 8 |
+
<script>
    /**
     * Toggle between the "Type Question" and "Voice Question" tabs.
     *
     * @param {string} type - Tab key; "<type>Section" must be an element id
     *                        (e.g. "text" -> #textSection).
     * @param {Event} [evt] - The click event. Optional for backward
     *                        compatibility with existing inline handlers
     *                        that call switchTab('text') without arguments;
     *                        in that case we fall back to the deprecated
     *                        window.event global rather than an undeclared
     *                        implicit `event`, which breaks in strict/module
     *                        scripts and non-inline invocations.
     */
    function switchTab(type, evt) {
        document.querySelectorAll('.tab-btn').forEach(b => b.classList.remove('active'));
        document.querySelectorAll('.tab-content').forEach(c => c.classList.remove('active'));

        const e = evt || window.event;
        if (e) {
            (e.currentTarget || e.target).classList.add('active');
        }
        document.getElementById(type + 'Section').classList.add('active');
    }
</script>
|
| 17 |
+
{% endblock %}
|
| 18 |
+
|
| 19 |
+
{% block content %}
|
| 20 |
+
<div class="qa-container">
|
| 21 |
+
<div class="glass-card">
|
| 22 |
+
<form action="/qa" method="post" enctype="multipart/form-data" class="quantum-form">
|
| 23 |
+
<div class="input-group">
|
| 24 |
+
<label>Context Repository</label>
|
| 25 |
+
<textarea name="context" placeholder="Paste the reference document here..."
|
| 26 |
+
required>{{ context or '' }}</textarea>
|
| 27 |
+
</div>
|
| 28 |
+
|
| 29 |
+
<div class="tabs">
|
| 30 |
+
<button type="button" class="tab-btn active" onclick="switchTab('text')">Type Question</button>
|
| 31 |
+
<button type="button" class="tab-btn" onclick="switchTab('voice')">Voice Question</button>
|
| 32 |
+
</div>
|
| 33 |
+
|
| 34 |
+
<div id="textSection" class="tab-content active">
|
| 35 |
+
<div class="input-group">
|
| 36 |
+
<label>Query Expression</label>
|
| 37 |
+
<div class="search-input">
|
| 38 |
+
<input type="text" name="question" placeholder="Ask a question about the context..."
|
| 39 |
+
value="{{ question or '' }}">
|
| 40 |
+
<i class="fas fa-search"></i>
|
| 41 |
+
</div>
|
| 42 |
+
</div>
|
| 43 |
+
</div>
|
| 44 |
+
|
| 45 |
+
<div id="voiceSection" class="tab-content">
|
| 46 |
+
<div class="upload-zone">
|
| 47 |
+
<i class="fas fa-microphone-alt upload-icon"></i>
|
| 48 |
+
<div class="upload-text">
|
| 49 |
+
<strong>Record Your Query</strong>
|
| 50 |
+
<span>Upload audio for voice-to-voice QA</span>
|
| 51 |
+
</div>
|
| 52 |
+
<input type="file" name="voice" accept="audio/*">
|
| 53 |
+
</div>
|
| 54 |
+
</div>
|
| 55 |
+
|
| 56 |
+
<button type="submit" class="btn-quantum full-width">
|
| 57 |
+
<i class="fas fa-brain"></i> Execute Reasoning
|
| 58 |
+
</button>
|
| 59 |
+
</form>
|
| 60 |
+
|
| 61 |
+
{% if answer %}
|
| 62 |
+
<div class="result-container animate-fade-in">
|
| 63 |
+
<div class="result-header">Reasoning Output</div>
|
| 64 |
+
<div class="answer-box">
|
| 65 |
+
<div class="answer-text">
|
| 66 |
+
<label>Extracted Answer</label>
|
| 67 |
+
<p>{{ answer }}</p>
|
| 68 |
+
</div>
|
| 69 |
+
|
| 70 |
+
<div class="vocal-playback">
|
| 71 |
+
<audio id="vocalOutput"
|
| 72 |
+
src="{{ url_for('static', filename='answer.mp3') }}?v={{ range(1, 99999)|random }}"></audio>
|
| 73 |
+
<button type="button" class="btn-playback" onclick="document.getElementById('vocalOutput').play()">
|
| 74 |
+
<i class="fas fa-volume-up"></i>
|
| 75 |
+
</button>
|
| 76 |
+
</div>
|
| 77 |
+
</div>
|
| 78 |
+
|
| 79 |
+
{% if score %}
|
| 80 |
+
<div class="confidence-meter">
|
| 81 |
+
<div class="meter-track">
|
| 82 |
+
<div class="meter-fill" style="width: {{ score }}%"></div>
|
| 83 |
+
</div>
|
| 84 |
+
<div class="meter-info">
|
| 85 |
+
<span>Confidence Score</span>
|
| 86 |
+
<span>{{ score }}%</span>
|
| 87 |
+
</div>
|
| 88 |
+
</div>
|
| 89 |
+
{% endif %}
|
| 90 |
+
</div>
|
| 91 |
+
{% endif %}
|
| 92 |
+
</div>
|
| 93 |
+
</div>
|
| 94 |
+
|
| 95 |
+
<style>
|
| 96 |
+
.qa-container {
|
| 97 |
+
max-width: 900px;
|
| 98 |
+
margin: 0 auto;
|
| 99 |
+
}
|
| 100 |
+
|
| 101 |
+
textarea {
|
| 102 |
+
width: 100%;
|
| 103 |
+
background: rgba(0, 0, 0, 0.2);
|
| 104 |
+
border: 1px solid var(--glass-border);
|
| 105 |
+
border-radius: var(--radius-md);
|
| 106 |
+
padding: 1.25rem;
|
| 107 |
+
color: var(--text-primary);
|
| 108 |
+
font-size: 1rem;
|
| 109 |
+
min-height: 200px;
|
| 110 |
+
line-height: 1.6;
|
| 111 |
+
}
|
| 112 |
+
|
| 113 |
+
.search-input {
|
| 114 |
+
position: relative;
|
| 115 |
+
}
|
| 116 |
+
|
| 117 |
+
.search-input input {
|
| 118 |
+
width: 100%;
|
| 119 |
+
background: rgba(0, 0, 0, 0.2);
|
| 120 |
+
border: 1px solid var(--glass-border);
|
| 121 |
+
border-radius: var(--radius-md);
|
| 122 |
+
padding: 1rem 1rem 1rem 3rem;
|
| 123 |
+
color: var(--text-primary);
|
| 124 |
+
font-size: 1.1rem;
|
| 125 |
+
}
|
| 126 |
+
|
| 127 |
+
.search-input i {
|
| 128 |
+
position: absolute;
|
| 129 |
+
left: 1rem;
|
| 130 |
+
top: 50%;
|
| 131 |
+
transform: translateY(-50%);
|
| 132 |
+
color: var(--accent-blue);
|
| 133 |
+
}
|
| 134 |
+
|
| 135 |
+
.tabs {
|
| 136 |
+
display: flex;
|
| 137 |
+
gap: 1rem;
|
| 138 |
+
margin-bottom: 2rem;
|
| 139 |
+
border-bottom: 1px solid var(--glass-border);
|
| 140 |
+
padding-bottom: 1rem;
|
| 141 |
+
}
|
| 142 |
+
|
| 143 |
+
.tab-btn {
|
| 144 |
+
background: transparent;
|
| 145 |
+
border: none;
|
| 146 |
+
color: var(--text-secondary);
|
| 147 |
+
font-weight: 600;
|
| 148 |
+
cursor: pointer;
|
| 149 |
+
padding: 0.5rem 1rem;
|
| 150 |
+
transition: var(--transition);
|
| 151 |
+
border-radius: var(--radius-md);
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
.tab-btn.active {
|
| 155 |
+
color: var(--accent-blue);
|
| 156 |
+
background: rgba(0, 210, 255, 0.1);
|
| 157 |
+
}
|
| 158 |
+
|
| 159 |
+
.tab-content {
|
| 160 |
+
display: none;
|
| 161 |
+
}
|
| 162 |
+
|
| 163 |
+
.tab-content.active {
|
| 164 |
+
display: block;
|
| 165 |
+
}
|
| 166 |
+
|
| 167 |
+
.upload-zone {
|
| 168 |
+
border: 2px dashed var(--glass-border);
|
| 169 |
+
border-radius: var(--radius-lg);
|
| 170 |
+
padding: 2.5rem;
|
| 171 |
+
text-align: center;
|
| 172 |
+
position: relative;
|
| 173 |
+
background: rgba(0, 0, 0, 0.1);
|
| 174 |
+
margin-bottom: 1.5rem;
|
| 175 |
+
}
|
| 176 |
+
|
| 177 |
+
.upload-zone input {
|
| 178 |
+
position: absolute;
|
| 179 |
+
inset: 0;
|
| 180 |
+
opacity: 0;
|
| 181 |
+
cursor: pointer;
|
| 182 |
+
}
|
| 183 |
+
|
| 184 |
+
.upload-icon {
|
| 185 |
+
font-size: 2.5rem;
|
| 186 |
+
color: var(--accent-blue);
|
| 187 |
+
margin-bottom: 1rem;
|
| 188 |
+
display: block;
|
| 189 |
+
}
|
| 190 |
+
|
| 191 |
+
.answer-box {
|
| 192 |
+
display: flex;
|
| 193 |
+
justify-content: space-between;
|
| 194 |
+
align-items: center;
|
| 195 |
+
background: rgba(0, 210, 255, 0.05);
|
| 196 |
+
padding: 2rem;
|
| 197 |
+
border-radius: var(--radius-md);
|
| 198 |
+
margin-bottom: 2rem;
|
| 199 |
+
border-left: 5px solid var(--accent-blue);
|
| 200 |
+
}
|
| 201 |
+
|
| 202 |
+
.answer-text label {
|
| 203 |
+
font-size: 0.75rem;
|
| 204 |
+
font-weight: 800;
|
| 205 |
+
text-transform: uppercase;
|
| 206 |
+
color: var(--accent-blue);
|
| 207 |
+
margin-bottom: 0.5rem;
|
| 208 |
+
display: block;
|
| 209 |
+
}
|
| 210 |
+
|
| 211 |
+
.answer-text p {
|
| 212 |
+
font-size: 1.5rem;
|
| 213 |
+
font-weight: 700;
|
| 214 |
+
color: var(--text-primary);
|
| 215 |
+
}
|
| 216 |
+
|
| 217 |
+
.btn-playback {
|
| 218 |
+
width: 60px;
|
| 219 |
+
height: 60px;
|
| 220 |
+
border-radius: 50%;
|
| 221 |
+
background: var(--quantum-gradient);
|
| 222 |
+
border: none;
|
| 223 |
+
color: white;
|
| 224 |
+
font-size: 1.5rem;
|
| 225 |
+
cursor: pointer;
|
| 226 |
+
display: grid;
|
| 227 |
+
place-items: center;
|
| 228 |
+
transition: var(--transition);
|
| 229 |
+
}
|
| 230 |
+
|
| 231 |
+
.btn-playback:hover {
|
| 232 |
+
transform: scale(1.1);
|
| 233 |
+
box-shadow: 0 0 20px rgba(0, 210, 255, 0.4);
|
| 234 |
+
}
|
| 235 |
+
|
| 236 |
+
.confidence-meter {
|
| 237 |
+
margin-top: 1rem;
|
| 238 |
+
}
|
| 239 |
+
|
| 240 |
+
.meter-track {
|
| 241 |
+
height: 8px;
|
| 242 |
+
background: rgba(255, 255, 255, 0.1);
|
| 243 |
+
border-radius: 4px;
|
| 244 |
+
overflow: hidden;
|
| 245 |
+
margin-bottom: 0.5rem;
|
| 246 |
+
}
|
| 247 |
+
|
| 248 |
+
.meter-fill {
|
| 249 |
+
height: 100%;
|
| 250 |
+
background: var(--quantum-gradient);
|
| 251 |
+
box-shadow: 0 0 10px rgba(0, 210, 255, 0.5);
|
| 252 |
+
}
|
| 253 |
+
|
| 254 |
+
.meter-info {
|
| 255 |
+
display: flex;
|
| 256 |
+
justify-content: space-between;
|
| 257 |
+
color: var(--text-secondary);
|
| 258 |
+
font-size: 0.85rem;
|
| 259 |
+
font-weight: 600;
|
| 260 |
+
}
|
| 261 |
+
</style>
|
| 262 |
+
{% endblock %}
|
templates/sentiment.html
ADDED
|
@@ -0,0 +1,210 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% extends "layout.html" %}
|
| 2 |
+
|
| 3 |
+
{% block title %}Empathy Engine - Quantum AI{% endblock %}
|
| 4 |
+
|
| 5 |
+
{% block page_header %}
|
| 6 |
+
<h1>Empathy Engine</h1>
|
| 7 |
+
<p>Contextual sentiment analysis for text and vocal recordings.</p>
|
| 8 |
+
{% endblock %}
|
| 9 |
+
|
| 10 |
+
{% block content %}
|
| 11 |
+
<div class="sentiment-container">
|
| 12 |
+
<div class="glass-card sentiment-analyzer">
|
| 13 |
+
<form action="/sentiment" method="post" enctype="multipart/form-data" class="quantum-form">
|
| 14 |
+
<div class="tabs">
|
| 15 |
+
<button type="button" class="tab-btn active" onclick="switchTab('text')">Text Analysis</button>
|
| 16 |
+
<button type="button" class="tab-btn" onclick="switchTab('voice')">Vocal Analysis</button>
|
| 17 |
+
</div>
|
| 18 |
+
|
| 19 |
+
<div id="textSection" class="tab-content active">
|
| 20 |
+
<div class="input-group">
|
| 21 |
+
<label>Input Text</label>
|
| 22 |
+
<textarea name="text" placeholder="Explain how you're feeling..."></textarea>
|
| 23 |
+
</div>
|
| 24 |
+
</div>
|
| 25 |
+
|
| 26 |
+
<div id="voiceSection" class="tab-content">
|
| 27 |
+
<div class="upload-zone">
|
| 28 |
+
<i class="fas fa-microphone-alt upload-icon"></i>
|
| 29 |
+
<div class="upload-text">
|
| 30 |
+
<strong>Upload Voice Recording</strong>
|
| 31 |
+
<span>WAV or MP3 format required</span>
|
| 32 |
+
</div>
|
| 33 |
+
<input type="file" name="voice" accept="audio/*">
|
| 34 |
+
</div>
|
| 35 |
+
</div>
|
| 36 |
+
|
| 37 |
+
<button type="submit" class="btn-quantum full-width">
|
| 38 |
+
<i class="fas fa-heartbeat"></i> Analyze Sentiment
|
| 39 |
+
</button>
|
| 40 |
+
</form>
|
| 41 |
+
|
| 42 |
+
{% if result %}
|
| 43 |
+
<div class="result-container animate-fade-in">
|
| 44 |
+
<div class="result-header">Engine Output</div>
|
| 45 |
+
|
| 46 |
+
{% if transcript %}
|
| 47 |
+
<div class="transcript-box">
|
| 48 |
+
<label>Vocal Transcription</label>
|
| 49 |
+
<p>"{{ transcript }}"</p>
|
| 50 |
+
</div>
|
| 51 |
+
{% endif %}
|
| 52 |
+
|
| 53 |
+
<div class="sentiment-meter">
|
| 54 |
+
<div class="meter-label">
|
| 55 |
+
<span>Detected Sentiment</span>
|
| 56 |
+
<span class="sentiment-value highlight-{{ result|lower }}">{{ result }}</span>
|
| 57 |
+
</div>
|
| 58 |
+
<!-- Simple visual score if available, or just a themed box -->
|
| 59 |
+
<div class="sentiment-score-box {{ result|lower }}">
|
| 60 |
+
{% if result|lower == 'positive' %}
|
| 61 |
+
<i class="fas fa-laugh-beam"></i>
|
| 62 |
+
{% elif result|lower == 'negative' %}
|
| 63 |
+
<i class="fas fa-frown-open"></i>
|
| 64 |
+
{% else %}
|
| 65 |
+
<i class="fas fa-meh"></i>
|
| 66 |
+
{% endif %}
|
| 67 |
+
</div>
|
| 68 |
+
</div>
|
| 69 |
+
</div>
|
| 70 |
+
{% endif %}
|
| 71 |
+
</div>
|
| 72 |
+
</div>
|
| 73 |
+
|
| 74 |
+
<style>
|
| 75 |
+
.sentiment-container {
|
| 76 |
+
max-width: 800px;
|
| 77 |
+
margin: 0 auto;
|
| 78 |
+
}
|
| 79 |
+
|
| 80 |
+
.tabs {
|
| 81 |
+
display: flex;
|
| 82 |
+
gap: 1rem;
|
| 83 |
+
margin-bottom: 2rem;
|
| 84 |
+
border-bottom: 1px solid var(--glass-border);
|
| 85 |
+
padding-bottom: 1rem;
|
| 86 |
+
}
|
| 87 |
+
|
| 88 |
+
.tab-btn {
|
| 89 |
+
background: transparent;
|
| 90 |
+
border: none;
|
| 91 |
+
color: var(--text-secondary);
|
| 92 |
+
font-weight: 600;
|
| 93 |
+
cursor: pointer;
|
| 94 |
+
padding: 0.5rem 1rem;
|
| 95 |
+
transition: var(--transition);
|
| 96 |
+
border-radius: var(--radius-md);
|
| 97 |
+
}
|
| 98 |
+
|
| 99 |
+
.tab-btn.active {
|
| 100 |
+
color: var(--accent-blue);
|
| 101 |
+
background: rgba(0, 210, 255, 0.1);
|
| 102 |
+
}
|
| 103 |
+
|
| 104 |
+
.tab-content {
|
| 105 |
+
display: none;
|
| 106 |
+
}
|
| 107 |
+
|
| 108 |
+
.tab-content.active {
|
| 109 |
+
display: block;
|
| 110 |
+
}
|
| 111 |
+
|
| 112 |
+
.upload-zone {
|
| 113 |
+
border: 2px dashed var(--glass-border);
|
| 114 |
+
border-radius: var(--radius-lg);
|
| 115 |
+
padding: 2.5rem;
|
| 116 |
+
text-align: center;
|
| 117 |
+
position: relative;
|
| 118 |
+
}
|
| 119 |
+
|
| 120 |
+
.upload-zone input {
|
| 121 |
+
position: absolute;
|
| 122 |
+
inset: 0;
|
| 123 |
+
opacity: 0;
|
| 124 |
+
cursor: pointer;
|
| 125 |
+
}
|
| 126 |
+
|
| 127 |
+
.transcript-box {
|
| 128 |
+
margin-bottom: 1.5rem;
|
| 129 |
+
padding: 1rem;
|
| 130 |
+
background: rgba(0, 0, 0, 0.2);
|
| 131 |
+
border-radius: var(--radius-md);
|
| 132 |
+
}
|
| 133 |
+
|
| 134 |
+
.transcript-box label {
|
| 135 |
+
font-size: 0.75rem;
|
| 136 |
+
font-weight: 800;
|
| 137 |
+
text-transform: uppercase;
|
| 138 |
+
color: var(--accent-blue);
|
| 139 |
+
}
|
| 140 |
+
|
| 141 |
+
.sentiment-meter {
|
| 142 |
+
display: flex;
|
| 143 |
+
align-items: center;
|
| 144 |
+
gap: 2rem;
|
| 145 |
+
background: rgba(255, 255, 255, 0.03);
|
| 146 |
+
padding: 1.5rem;
|
| 147 |
+
border-radius: var(--radius-md);
|
| 148 |
+
}
|
| 149 |
+
|
| 150 |
+
.meter-label {
|
| 151 |
+
display: flex;
|
| 152 |
+
flex-direction: column;
|
| 153 |
+
flex-grow: 1;
|
| 154 |
+
}
|
| 155 |
+
|
| 156 |
+
.sentiment-value {
|
| 157 |
+
font-size: 2.5rem;
|
| 158 |
+
font-weight: 800;
|
| 159 |
+
text-transform: capitalize;
|
| 160 |
+
}
|
| 161 |
+
|
| 162 |
+
.highlight-positive {
|
| 163 |
+
color: #22c55e;
|
| 164 |
+
}
|
| 165 |
+
|
| 166 |
+
.highlight-negative {
|
| 167 |
+
color: #ef4444;
|
| 168 |
+
}
|
| 169 |
+
|
| 170 |
+
.highlight-neutral {
|
| 171 |
+
color: var(--accent-blue);
|
| 172 |
+
}
|
| 173 |
+
|
| 174 |
+
.sentiment-score-box {
|
| 175 |
+
font-size: 3.5rem;
|
| 176 |
+
}
|
| 177 |
+
|
| 178 |
+
.sentiment-score-box.positive {
|
| 179 |
+
color: #22c55e;
|
| 180 |
+
}
|
| 181 |
+
|
| 182 |
+
.sentiment-score-box.negative {
|
| 183 |
+
color: #ef4444;
|
| 184 |
+
}
|
| 185 |
+
|
| 186 |
+
.sentiment-score-box.neutral {
|
| 187 |
+
color: var(--accent-blue);
|
| 188 |
+
}
|
| 189 |
+
|
| 190 |
+
textarea {
|
| 191 |
+
width: 100%;
|
| 192 |
+
background: rgba(0, 0, 0, 0.2);
|
| 193 |
+
border: 1px solid var(--glass-border);
|
| 194 |
+
border-radius: var(--radius-md);
|
| 195 |
+
padding: 1rem;
|
| 196 |
+
color: var(--text-primary);
|
| 197 |
+
min-height: 150px;
|
| 198 |
+
}
|
| 199 |
+
</style>
|
| 200 |
+
|
| 201 |
+
<script>
|
| 202 |
+
// Switch between the 'text' and 'voice' input tabs.
// type: 'text' | 'voice' — matches the <div id="...Section"> panels above.
function switchTab(type) {
    // Deactivate every tab button and panel before activating the target.
    document.querySelectorAll('.tab-btn').forEach(b => b.classList.remove('active'));
    document.querySelectorAll('.tab-content').forEach(c => c.classList.remove('active'));

    // Fix: the original used the implicit global `event` object
    // (event.target), which is fragile and fails when this function is
    // invoked programmatically (no click event in flight). Activate the
    // button by position instead: the markup defines the 'text' tab button
    // first and the 'voice' tab button second.
    const buttons = document.querySelectorAll('.tab-btn');
    const index = type === 'text' ? 0 : 1;
    if (buttons[index]) {
        buttons[index].classList.add('active');
    }
    document.getElementById(type + 'Section').classList.add('active');
}
|
| 209 |
+
</script>
|
| 210 |
+
{% endblock %}
|
templates/textgen.html
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% extends "layout.html" %}
|
| 2 |
+
|
| 3 |
+
{% block title %}Text Synthesis - Quantum AI{% endblock %}
|
| 4 |
+
|
| 5 |
+
{% block page_header %}
|
| 6 |
+
<h1>Text Synthesis</h1>
|
| 7 |
+
<p>Neural text generation powered by GPT-2 architecture.</p>
|
| 8 |
+
{% endblock %}
|
| 9 |
+
|
| 10 |
+
{% block content %}
|
| 11 |
+
<div class="service-panel glass-card">
|
| 12 |
+
<form action="/textgen" method="post" class="quantum-form">
|
| 13 |
+
<div class="input-group">
|
| 14 |
+
<label for="prompt">Synthesis Prompt</label>
|
| 15 |
+
<textarea id="prompt" name="prompt" placeholder="Enter a seed sentence for the AI to expand upon..."
|
| 16 |
+
required></textarea>
|
| 17 |
+
</div>
|
| 18 |
+
|
| 19 |
+
<button type="submit" class="btn-quantum full-width">
|
| 20 |
+
<i class="fas fa-magic"></i> Synthesize Text
|
| 21 |
+
</button>
|
| 22 |
+
</form>
|
| 23 |
+
|
| 24 |
+
{% if generated_text %}
|
| 25 |
+
<div class="result-container animate-fade-in">
|
| 26 |
+
<div class="result-header">Engine Output</div>
|
| 27 |
+
<div class="result-display text-output">
|
| 28 |
+
{{ generated_text }}
|
| 29 |
+
</div>
|
| 30 |
+
<div class="result-note">
|
| 31 |
+
<i class="fas fa-info-circle"></i> Output is generated token-by-token based on probability distributions.
|
| 32 |
+
</div>
|
| 33 |
+
</div>
|
| 34 |
+
{% endif %}
|
| 35 |
+
</div>
|
| 36 |
+
|
| 37 |
+
<style>
|
| 38 |
+
.service-panel {
|
| 39 |
+
max-width: 800px;
|
| 40 |
+
margin: 0 auto;
|
| 41 |
+
}
|
| 42 |
+
|
| 43 |
+
.quantum-form {
|
| 44 |
+
display: flex;
|
| 45 |
+
flex-direction: column;
|
| 46 |
+
gap: 1.5rem;
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
.input-group {
|
| 50 |
+
display: flex;
|
| 51 |
+
flex-direction: column;
|
| 52 |
+
gap: 0.5rem;
|
| 53 |
+
}
|
| 54 |
+
|
| 55 |
+
.input-group label {
|
| 56 |
+
font-weight: 600;
|
| 57 |
+
color: var(--text-secondary);
|
| 58 |
+
font-size: 0.9rem;
|
| 59 |
+
text-transform: uppercase;
|
| 60 |
+
letter-spacing: 1px;
|
| 61 |
+
}
|
| 62 |
+
|
| 63 |
+
textarea {
|
| 64 |
+
background: rgba(0, 0, 0, 0.2);
|
| 65 |
+
border: 1px solid var(--glass-border);
|
| 66 |
+
border-radius: var(--radius-md);
|
| 67 |
+
padding: 1rem;
|
| 68 |
+
color: var(--text-primary);
|
| 69 |
+
font-size: 1.1rem;
|
| 70 |
+
min-height: 120px;
|
| 71 |
+
resize: vertical;
|
| 72 |
+
transition: var(--transition);
|
| 73 |
+
}
|
| 74 |
+
|
| 75 |
+
textarea:focus {
|
| 76 |
+
outline: none;
|
| 77 |
+
border-color: var(--accent-blue);
|
| 78 |
+
background: rgba(0, 210, 255, 0.05);
|
| 79 |
+
}
|
| 80 |
+
|
| 81 |
+
.full-width {
|
| 82 |
+
width: 100%;
|
| 83 |
+
justify-content: center;
|
| 84 |
+
padding: 1rem;
|
| 85 |
+
}
|
| 86 |
+
|
| 87 |
+
.text-output {
|
| 88 |
+
line-height: 1.8;
|
| 89 |
+
font-size: 1.1rem;
|
| 90 |
+
white-space: pre-wrap;
|
| 91 |
+
background: rgba(255, 255, 255, 0.03);
|
| 92 |
+
padding: 2rem;
|
| 93 |
+
border-radius: var(--radius-md);
|
| 94 |
+
border-left: 4px solid var(--accent-blue);
|
| 95 |
+
}
|
| 96 |
+
</style>
|
| 97 |
+
{% endblock %}
|
templates/translate.html
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% extends "layout.html" %}
|
| 2 |
+
|
| 3 |
+
{% block title %}Neural Translate - Quantum AI{% endblock %}
|
| 4 |
+
|
| 5 |
+
{% block page_header %}
|
| 6 |
+
<h1>Neural Translate</h1>
|
| 7 |
+
<p>Context-aware English to Urdu sequence-to-sequence translation.</p>
|
| 8 |
+
{% endblock %}
|
| 9 |
+
|
| 10 |
+
{% block content %}
|
| 11 |
+
<div class="service-panel glass-card">
|
| 12 |
+
<form action="/translate" method="post" class="quantum-form">
|
| 13 |
+
<div class="translation-grid">
|
| 14 |
+
<div class="input-group">
|
| 15 |
+
<label>Source (English)</label>
|
| 16 |
+
<textarea name="text" placeholder="Enter text to translate..." required></textarea>
|
| 17 |
+
</div>
|
| 18 |
+
<div class="input-group">
|
| 19 |
+
<label>Target (Urdu)</label>
|
| 20 |
+
<div class="output-area {% if translated_text %}active{% endif %}">
|
| 21 |
+
{% if translated_text %}
|
| 22 |
+
{{ translated_text }}
|
| 23 |
+
{% else %}
|
| 24 |
+
<span class="placeholder">Translation will appear here...</span>
|
| 25 |
+
{% endif %}
|
| 26 |
+
</div>
|
| 27 |
+
</div>
|
| 28 |
+
</div>
|
| 29 |
+
|
| 30 |
+
<button type="submit" class="btn-quantum full-width">
|
| 31 |
+
<i class="fas fa-sync-alt"></i> Execute Translation
|
| 32 |
+
</button>
|
| 33 |
+
</form>
|
| 34 |
+
</div>
|
| 35 |
+
|
| 36 |
+
<style>
|
| 37 |
+
.service-panel {
|
| 38 |
+
max-width: 1000px;
|
| 39 |
+
margin: 0 auto;
|
| 40 |
+
}
|
| 41 |
+
|
| 42 |
+
.translation-grid {
|
| 43 |
+
display: grid;
|
| 44 |
+
grid-template-columns: 1fr 1fr;
|
| 45 |
+
gap: 1.5rem;
|
| 46 |
+
margin-bottom: 1.5rem;
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
.input-group {
|
| 50 |
+
display: flex;
|
| 51 |
+
flex-direction: column;
|
| 52 |
+
gap: 0.5rem;
|
| 53 |
+
}
|
| 54 |
+
|
| 55 |
+
.input-group label {
|
| 56 |
+
font-weight: 600;
|
| 57 |
+
color: var(--text-secondary);
|
| 58 |
+
font-size: 0.85rem;
|
| 59 |
+
text-transform: uppercase;
|
| 60 |
+
letter-spacing: 1px;
|
| 61 |
+
}
|
| 62 |
+
|
| 63 |
+
textarea,
|
| 64 |
+
.output-area {
|
| 65 |
+
background: rgba(0, 0, 0, 0.2);
|
| 66 |
+
border: 1px solid var(--glass-border);
|
| 67 |
+
border-radius: var(--radius-md);
|
| 68 |
+
padding: 1.25rem;
|
| 69 |
+
color: var(--text-primary);
|
| 70 |
+
font-size: 1.2rem;
|
| 71 |
+
min-height: 250px;
|
| 72 |
+
transition: var(--transition);
|
| 73 |
+
}
|
| 74 |
+
|
| 75 |
+
.output-area {
|
| 76 |
+
display: flex;
|
| 77 |
+
align-items: flex-start;
|
| 78 |
+
justify-content: flex-start;
|
| 79 |
+
direction: rtl;
|
| 80 |
+
/* Urdu is RTL */
|
| 81 |
+
font-family: 'Noto Nastaliq Urdu', serif;
|
| 82 |
+
}
|
| 83 |
+
|
| 84 |
+
.output-area.active {
|
| 85 |
+
background: rgba(0, 210, 255, 0.03);
|
| 86 |
+
}
|
| 87 |
+
|
| 88 |
+
.output-area .placeholder {
|
| 89 |
+
color: var(--text-secondary);
|
| 90 |
+
font-size: 1rem;
|
| 91 |
+
font-family: 'Outfit', sans-serif;
|
| 92 |
+
direction: ltr;
|
| 93 |
+
}
|
| 94 |
+
|
| 95 |
+
textarea:focus {
|
| 96 |
+
outline: none;
|
| 97 |
+
border-color: var(--accent-blue);
|
| 98 |
+
}
|
| 99 |
+
|
| 100 |
+
.full-width {
|
| 101 |
+
width: 100%;
|
| 102 |
+
justify-content: center;
|
| 103 |
+
padding: 1rem;
|
| 104 |
+
}
|
| 105 |
+
|
| 106 |
+
@media (max-width: 768px) {
|
| 107 |
+
.translation-grid {
|
| 108 |
+
grid-template-columns: 1fr;
|
| 109 |
+
}
|
| 110 |
+
}
|
| 111 |
+
</style>
|
| 112 |
+
{% endblock %}
|
templates/zsl.html
ADDED
|
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{% extends "layout.html" %}
|
| 2 |
+
|
| 3 |
+
{% block title %}Zero-Shot Lab - Quantum AI{% endblock %}
|
| 4 |
+
|
| 5 |
+
{% block page_header %}
|
| 6 |
+
<h1>Zero-Shot Lab</h1>
|
| 7 |
+
<p>Dynamic classification engine for unforeseen data categories.</p>
|
| 8 |
+
{% endblock %}
|
| 9 |
+
|
| 10 |
+
{% block content %}
|
| 11 |
+
<div class="service-panel glass-card">
|
| 12 |
+
<form action="/zsl" method="post" class="quantum-form">
|
| 13 |
+
<div class="input-group">
|
| 14 |
+
<label>Abstract Data Input</label>
|
| 15 |
+
<textarea name="text" placeholder="Enter the content you wish to classify..." required></textarea>
|
| 16 |
+
</div>
|
| 17 |
+
|
| 18 |
+
<div class="input-group">
|
| 19 |
+
<label>Candidate Categories (Comma Separated)</label>
|
| 20 |
+
<input type="text" name="labels" placeholder="e.g. Finance, Technology, Health, Sports" required>
|
| 21 |
+
</div>
|
| 22 |
+
|
| 23 |
+
<button type="submit" class="btn-quantum full-width">
|
| 24 |
+
<i class="fas fa-bullseye"></i> Initialize Zero-Shot Analysis
|
| 25 |
+
</button>
|
| 26 |
+
</form>
|
| 27 |
+
|
| 28 |
+
{% if error %}
|
| 29 |
+
<div class="error-msg animate-fade-in">
|
| 30 |
+
<i class="fas fa-exclamation-triangle"></i> {{ error }}
|
| 31 |
+
</div>
|
| 32 |
+
{% endif %}
|
| 33 |
+
|
| 34 |
+
{% if result %}
|
| 35 |
+
<div class="result-container animate-fade-in">
|
| 36 |
+
<div class="result-header">Engine Output</div>
|
| 37 |
+
|
| 38 |
+
<div class="best-prediction">
|
| 39 |
+
<label>Highest Probability Fit</label>
|
| 40 |
+
<div class="prediction-value">
|
| 41 |
+
<span class="category">{{ result.label }}</span>
|
| 42 |
+
<span class="confidence">{{ result.score }}% Confidence</span>
|
| 43 |
+
</div>
|
| 44 |
+
</div>
|
| 45 |
+
|
| 46 |
+
<div class="detailed-results">
|
| 47 |
+
<label>Category Distributions</label>
|
| 48 |
+
<div class="result-grid">
|
| 49 |
+
{% for label, score in result.all_results %}
|
| 50 |
+
<div class="result-row">
|
| 51 |
+
<div class="row-info">
|
| 52 |
+
<span>{{ label }}</span>
|
| 53 |
+
<span>{{ score }}%</span>
|
| 54 |
+
</div>
|
| 55 |
+
<div class="row-bar">
|
| 56 |
+
<div class="bar-fill" style="width: {{ score }}%"></div>
|
| 57 |
+
</div>
|
| 58 |
+
</div>
|
| 59 |
+
{% endfor %}
|
| 60 |
+
</div>
|
| 61 |
+
</div>
|
| 62 |
+
</div>
|
| 63 |
+
{% endif %}
|
| 64 |
+
</div>
|
| 65 |
+
|
| 66 |
+
<style>
|
| 67 |
+
.service-panel {
|
| 68 |
+
max-width: 850px;
|
| 69 |
+
margin: 0 auto;
|
| 70 |
+
}
|
| 71 |
+
|
| 72 |
+
textarea {
|
| 73 |
+
width: 100%;
|
| 74 |
+
background: rgba(0, 0, 0, 0.2);
|
| 75 |
+
border: 1px solid var(--glass-border);
|
| 76 |
+
border-radius: var(--radius-md);
|
| 77 |
+
padding: 1.25rem;
|
| 78 |
+
color: var(--text-primary);
|
| 79 |
+
min-height: 150px;
|
| 80 |
+
}
|
| 81 |
+
|
| 82 |
+
input[type="text"] {
|
| 83 |
+
width: 100%;
|
| 84 |
+
background: rgba(0, 0, 0, 0.2);
|
| 85 |
+
border: 1px solid var(--glass-border);
|
| 86 |
+
border-radius: var(--radius-md);
|
| 87 |
+
padding: 1rem;
|
| 88 |
+
color: var(--text-primary);
|
| 89 |
+
font-size: 1.1rem;
|
| 90 |
+
}
|
| 91 |
+
|
| 92 |
+
.best-prediction {
|
| 93 |
+
background: rgba(0, 210, 255, 0.05);
|
| 94 |
+
padding: 2rem;
|
| 95 |
+
border-radius: var(--radius-md);
|
| 96 |
+
margin-bottom: 2rem;
|
| 97 |
+
border-right: 4px solid var(--accent-blue);
|
| 98 |
+
}
|
| 99 |
+
|
| 100 |
+
.best-prediction label {
|
| 101 |
+
font-size: 0.75rem;
|
| 102 |
+
font-weight: 800;
|
| 103 |
+
text-transform: uppercase;
|
| 104 |
+
color: var(--accent-blue);
|
| 105 |
+
margin-bottom: 0.5rem;
|
| 106 |
+
display: block;
|
| 107 |
+
}
|
| 108 |
+
|
| 109 |
+
.prediction-value {
|
| 110 |
+
display: flex;
|
| 111 |
+
justify-content: space-between;
|
| 112 |
+
align-items: baseline;
|
| 113 |
+
}
|
| 114 |
+
|
| 115 |
+
.prediction-value .category {
|
| 116 |
+
font-size: 2.2rem;
|
| 117 |
+
font-weight: 800;
|
| 118 |
+
color: var(--text-primary);
|
| 119 |
+
}
|
| 120 |
+
|
| 121 |
+
.prediction-value .confidence {
|
| 122 |
+
font-size: 1.2rem;
|
| 123 |
+
font-weight: 600;
|
| 124 |
+
color: var(--accent-purple);
|
| 125 |
+
}
|
| 126 |
+
|
| 127 |
+
.detailed-results label {
|
| 128 |
+
color: var(--text-secondary);
|
| 129 |
+
font-weight: 600;
|
| 130 |
+
font-size: 0.9rem;
|
| 131 |
+
margin-bottom: 1rem;
|
| 132 |
+
display: block;
|
| 133 |
+
}
|
| 134 |
+
|
| 135 |
+
.result-grid {
|
| 136 |
+
display: flex;
|
| 137 |
+
flex-direction: column;
|
| 138 |
+
gap: 1.25rem;
|
| 139 |
+
}
|
| 140 |
+
|
| 141 |
+
.result-row {
|
| 142 |
+
display: flex;
|
| 143 |
+
flex-direction: column;
|
| 144 |
+
gap: 0.5rem;
|
| 145 |
+
}
|
| 146 |
+
|
| 147 |
+
.row-info {
|
| 148 |
+
display: flex;
|
| 149 |
+
justify-content: space-between;
|
| 150 |
+
font-weight: 600;
|
| 151 |
+
font-size: 0.95rem;
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
.row-bar {
|
| 155 |
+
height: 6px;
|
| 156 |
+
background: rgba(255, 255, 255, 0.05);
|
| 157 |
+
border-radius: 3px;
|
| 158 |
+
overflow: hidden;
|
| 159 |
+
}
|
| 160 |
+
|
| 161 |
+
.bar-fill {
|
| 162 |
+
height: 100%;
|
| 163 |
+
background: var(--quantum-gradient);
|
| 164 |
+
}
|
| 165 |
+
|
| 166 |
+
.error-msg {
|
| 167 |
+
margin-top: 1rem;
|
| 168 |
+
background: rgba(239, 68, 68, 0.1);
|
| 169 |
+
color: #ef4444;
|
| 170 |
+
padding: 1rem;
|
| 171 |
+
border-radius: var(--radius-md);
|
| 172 |
+
display: flex;
|
| 173 |
+
align-items: center;
|
| 174 |
+
gap: 10px;
|
| 175 |
+
}
|
| 176 |
+
</style>
|
| 177 |
+
{% endblock %}
|
train_cnn.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
import torch.nn as nn
|
| 3 |
+
import torch.optim as optim
|
| 4 |
+
from torch.utils.data import DataLoader
|
| 5 |
+
from torchvision import datasets, transforms
|
| 6 |
+
import os
|
| 7 |
+
|
| 8 |
+
# --- Improved CNN Architecture ---
|
| 9 |
+
class GenderCNN(nn.Module):
    """Binary gender classifier CNN.

    Three convolutional blocks (Conv -> ReLU -> BatchNorm -> MaxPool)
    followed by a small fully connected head ending in a Sigmoid, so the
    network emits a single probability per image. Per the note in main(),
    ImageFolder's alphabetical ordering gives Female: 0, Male: 1.

    NOTE(review): the Linear input size hard-codes 3x128x128 inputs — each
    of the three MaxPool2d layers halves 128 down to 16, hence
    128 * 16 * 16 features. main() enforces this with
    transforms.Resize((128, 128)); confirm any other caller does the same.
    """

    def __init__(self):
        super(GenderCNN, self).__init__()

        # Feature extractor: each block halves the spatial resolution while
        # widening the channel count (3 -> 32 -> 64 -> 128).
        self.conv_layers = nn.Sequential(
            # Block 1
            nn.Conv2d(3, 32, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.BatchNorm2d(32),
            nn.MaxPool2d(2, 2),

            # Block 2
            nn.Conv2d(32, 64, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.BatchNorm2d(64),
            nn.MaxPool2d(2, 2),

            # Block 3 (New Layer for better features)
            nn.Conv2d(64, 128, kernel_size=3, padding=1),
            nn.ReLU(),
            nn.BatchNorm2d(128),
            nn.MaxPool2d(2, 2)
        )

        # Classifier head: flatten the 128x16x16 feature map down to one
        # sigmoid probability suitable for BCELoss.
        self.fc_layers = nn.Sequential(
            nn.Flatten(),
            # Input to linear: 128 * (128/2/2/2) * (128/2/2/2) = 128 * 16 * 16
            nn.Linear(128 * 16 * 16, 256),
            nn.ReLU(),
            nn.Dropout(0.5),  # Add dropout to prevent overfitting
            nn.Linear(256, 1),
            nn.Sigmoid()
        )

    def forward(self, x):
        """Run a (N, 3, 128, 128) batch through the network.

        Returns a (N, 1) tensor of probabilities in [0, 1].
        """
        x = self.conv_layers(x)
        x = self.fc_layers(x)
        return x
|
| 47 |
+
|
| 48 |
+
def main():
    """Train the GenderCNN binary classifier on an ImageFolder dataset.

    Expects images under dataset/train/<ClassName>/ (e.g. Female/ and Male/)
    and writes the trained weights to models/gender_model.pth.
    """
    print("--- Starting Improved CNN Training Loop ---")

    # Prefer the GPU when one is available; otherwise fall back to the CPU.
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    print(f"Using device: {device}")

    # Data is expected in a local 'dataset' folder next to this script.
    # USER MUST ENSURE DATA IS IN THESE FOLDERS.
    train_root = os.path.join('dataset', 'train')

    # Guard clause: bail out early with a helpful message if data is missing.
    if not os.path.exists(train_root):
        print(f"ERROR: Training path not found at {train_root}")
        print("Please place your images in: dataset/train/Male and dataset/train/Female")
        return

    # Hyperparameters.
    num_epochs = 10
    batch_size = 32
    lr = 0.001

    # Resize to the 128x128 input GenderCNN expects, augment with random
    # horizontal flips, and normalize each channel to roughly [-1, 1].
    preprocess = transforms.Compose([
        transforms.Resize((128, 128)),
        transforms.RandomHorizontalFlip(),  # Data augmentation
        transforms.ToTensor(),
        transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    ])

    train_set = datasets.ImageFolder(root=train_root, transform=preprocess)
    loader = DataLoader(train_set, batch_size=batch_size, shuffle=True)

    print(f"Dataset found with classes: {train_set.classes}")
    # Note: ImageFolder sorts class folders alphabetically, so
    # Female: 0, Male: 1 — matching the original mapping.

    net = GenderCNN().to(device)
    criterion = nn.BCELoss()
    optimizer = optim.Adam(net.parameters(), lr=lr)

    net.train()
    for epoch in range(num_epochs):
        running_loss = 0
        hits = 0
        seen = 0

        for batch_images, batch_labels in loader:
            batch_images = batch_images.to(device)
            # BCELoss wants float targets shaped (batch, 1) like the outputs.
            batch_labels = batch_labels.float().unsqueeze(1).to(device)

            optimizer.zero_grad()
            scores = net(batch_images)
            loss = criterion(scores, batch_labels)
            loss.backward()
            optimizer.step()

            running_loss += loss.item()
            # Threshold the sigmoid output at 0.5 for a hard prediction.
            hard_preds = (scores > 0.5).float()
            hits += (hard_preds == batch_labels).sum().item()
            seen += batch_labels.size(0)

        avg_loss = running_loss / len(loader)
        accuracy = (hits / seen) * 100
        print(f"Epoch [{epoch+1}/{num_epochs}] - Loss: {avg_loss:.4f} - Accuracy: {accuracy:.2f}%")

    # Persist the trained weights for the inference side of the app.
    os.makedirs('models', exist_ok=True)
    torch.save(net.state_dict(), 'models/gender_model.pth')
    print("\nTraining Complete! Model saved to models/gender_model.pth")


if __name__ == '__main__':
    main()
|
verify_extended.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
import sys
|
| 3 |
+
import os
|
| 4 |
+
import torch
|
| 5 |
+
import numpy as np
|
| 6 |
+
import pandas as pd
|
| 7 |
+
import librosa
|
| 8 |
+
from models_loader import loader
|
| 9 |
+
|
| 10 |
+
def test_extended():
    """Smoke-test the extended components: Whisper STT, DBSCAN clustering,
    and Apriori association-rule mining.

    Every check prints its outcome instead of raising, so one broken
    dependency does not hide the status of the others.
    """
    print("--- Starting Extended Model Verification ---")

    # 1. STT (Speech to Text) — exercise the Whisper pipeline on silence.
    print("\nTesting STT (Whisper)...")
    if not loader.stt_pipeline:
        print("FAILED: STT pipeline not loaded")
    else:
        try:
            # One second of silent audio at 16 kHz.
            silence = np.zeros(16000, dtype=np.float32)
            transcription = loader.stt_pipeline(silence)
            print(f"STT Result: {transcription}")
        except Exception as exc:
            print(f"FAILED: STT pipeline error: {exc}")

    # 2. DBSCAN — cluster a handful of random 2-D points.
    print("\nTesting DBSCAN...")
    try:
        from sklearn.cluster import DBSCAN
        points = np.random.rand(10, 2)
        clustering = DBSCAN(eps=0.3, min_samples=2).fit(points)
        print(f"DBSCAN labels: {clustering.labels_}")
    except Exception as exc:
        print(f"FAILED: DBSCAN error: {exc}")

    # 3. Apriori — mine association rules from a tiny basket dataset.
    print("\nTesting Apriori...")
    try:
        from mlxtend.frequent_patterns import apriori, association_rules
        from mlxtend.preprocessing import TransactionEncoder
        baskets = [['Milk', 'Onion', 'Nut', 'Kidney Beans', 'Eggs', 'Yogurt'],
                   ['Dill', 'Onion', 'Nut', 'Kidney Beans', 'Eggs', 'Yogurt'],
                   ['Milk', 'Apple', 'Kidney Beans', 'Eggs'],
                   ['Milk', 'Unicorn', 'Corn', 'Kidney Beans', 'Yogurt'],
                   ['Corn', 'Onion', 'Onion', 'Kidney Beans', 'Ice cream', 'Eggs']]
        encoder = TransactionEncoder()
        onehot = encoder.fit(baskets).transform(baskets)
        basket_df = pd.DataFrame(onehot, columns=encoder.columns_)
        frequent = apriori(basket_df, min_support=0.6, use_colnames=True)
        mined_rules = association_rules(frequent, metric="lift", min_threshold=0.7)
        print(f"Apriori rules found: {len(mined_rules)}")
    except Exception as exc:
        print(f"FAILED: Apriori error: {exc}")

    print("\n--- Extended Verification Complete ---")


if __name__ == "__main__":
    test_extended()
|
verify_models.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import os
|
| 3 |
+
|
| 4 |
+
# Add current directory to path
|
| 5 |
+
sys.path.append(os.getcwd())
|
| 6 |
+
|
| 7 |
+
from models_loader import loader
|
| 8 |
+
import torch
|
| 9 |
+
from PIL import Image
|
| 10 |
+
import numpy as np
|
| 11 |
+
|
| 12 |
+
def test_models():
    """Smoke-test every lazily loaded model exposed by models_loader.

    Each check prints a "Testing ..." header, then either the model's
    result or a FAILED line.  Checks are isolated in try/except so that
    one broken model does not abort the remaining verifications
    (matching the error-handling style of verify_extended.py).
    Returns None; progress is reported via stdout only.
    """

    def _check(title, pipeline, invoke):
        # Run a single model check.  `pipeline` is the lazily loaded
        # pipeline (falsy when loading failed); `invoke` calls it and
        # returns a printable result.
        print(f"\nTesting {title}...")
        if not pipeline:
            print(f"FAILED: {title} pipeline not loaded")
            return
        try:
            print(f"Result: {invoke(pipeline)}")
        except Exception as e:
            # Keep going: report the failure and test the next model.
            print(f"FAILED: {title} error: {e}")

    print("--- Starting Model Verification ---")

    # 1. Sentiment
    _check("Sentiment Analysis", loader.sentiment_pipeline,
           lambda p: p("I love this project!"))

    # 2. QA
    _check("Question Answering", loader.qa_pipeline,
           lambda p: p(question="What is this?", context="This is a test."))

    # 3. Translation
    _check("Translation (MT-EN-UR)", loader.translator_pipeline,
           lambda p: p("Hello, how are you?"))

    # 4. Text Gen
    _check("Text Generation", loader.text_gen_pipeline,
           lambda p: p("Once upon a time", max_length=20))

    # 5. ZSL — report only the top-ranked label, as before.
    _check("Zero-Shot Learning", loader.zsl_pipeline,
           lambda p: p("This is about sports.",
                       candidate_labels=["politics", "sports", "cooking"])["labels"][0])

    # 6. Gender classifier — feed a random 224x224 RGB image purely to
    # exercise the inference path (the prediction itself is meaningless).
    _check("Image Classification (Gender)", loader.gender_classifier,
           lambda p: p(Image.fromarray(np.uint8(np.random.rand(224, 224, 3) * 255))))

    print("\n--- Verification Complete ---")
|
| 66 |
+
|
| 67 |
+
# Allow this verification script to be run directly (python verify_models.py).
if __name__ == "__main__":
    test_models()
|