/*
 * ProductSection — per-vertical ML product dashboard panel (React + Vite).
 *
 * NOTE(review): this file appears to have lost its original line breaks and
 * most of its JSX element tags in transit (e.g. `icon: ,` in the configs map,
 * bare text where markup once was, and `//` comments that now sit mid-line
 * and swallow trailing code). Code tokens below are reproduced byte-for-byte;
 * only comments were added. Recover the real formatting/markup from version
 * control before making behavioral edits.
 */
import React, { useState, useEffect } from 'react'; import { Activity, Zap, Shield, Globe, Lock, Download, ChevronDown, FileText } from 'lucide-react'; import PredictionCard from './ml/PredictionCard'; import ModelPerformance from './ml/ModelPerformance'; import FeatureImportance from './ml/FeatureImportance';
// Renders one product vertical (fintech, ai_talent, esg, regulatory,
// supply_chain): fetches a data preview, the dataset file list and an ML
// prediction in parallel, and falls back to polling /api/status while the
// backend's ML engine is warming up. `id` is accepted as a prop but not
// visibly used in this chunk — presumably a DOM anchor target; confirm
// against the caller.
const ProductSection = ({ vertical, id }) => { const [data, setData] = useState(null); const [prediction, setPrediction] = useState(null); const [files, setFiles] = useState([]); const [loading, setLoading] = useState(true); const [showFiles, setShowFiles] = useState(false); const [status, setStatus] = useState(null); const [debugLogs, setDebugLogs] = useState([]);
// Prepends a timestamped entry to the on-screen debug console, keeping only
// the 50 newest entries (newest first).
const addDebugLog = (msg) => { setDebugLogs(prev => [`[${new Date().toLocaleTimeString()}] ${msg}`, ...prev].slice(0, 50)); };
// Fetches preview + file list + prediction concurrently via Promise.all.
// On success it populates state and clears `loading`; if any payload carries
// an error field ("ML Engine Loading" / "Not Found") or the fetch throws, it
// hands off to pollStatus() instead.
// NOTE(review): neither error path calls setLoading(false), so the component
// stays on the loading splash until a later successful fetchData() — verify
// that is intended.
// NOTE(review): pollStatus reschedules itself via setTimeout and the
// useEffect below has no cleanup, so polling can outlive the component or
// overlap when `vertical` changes — consider a cancel flag/AbortController.
const fetchData = async () => { setLoading(true); const apiUrl = import.meta.env.VITE_API_URL || ''; try { // Fetch Data Preview, Files, AND ML Prediction const [previewData, filesData, predictionData] = await Promise.all([ fetch(`${apiUrl}/api/preview/${vertical}`).then(res => res.json()), fetch(`${apiUrl}/api/files/${vertical}`).then(res => res.json()), fetch(`${apiUrl}/api/predict/${vertical}`).then(res => res.json()) ]); console.log("Preview Data:", previewData); console.log("Files Data:", filesData); console.log("Prediction Data:", predictionData); if (previewData.error || predictionData.error || predictionData.detail) { const errorMsg = previewData.error || predictionData.error || predictionData.detail; console.error("API Error:", errorMsg); addDebugLog(`API Error: ${errorMsg}`); // If error is due to loading, start polling if (predictionData.error === "ML Engine Loading" || errorMsg === "Not Found") { pollStatus(); } } else { setData(previewData); setFiles(filesData.files || []); setPrediction(predictionData); setLoading(false); } } catch (err) { console.error("Fetch Error:", err); addDebugLog(`Fetch Error: ${err.message}`); // Only stop loading if we are NOT going
to poll // But here we want to poll if fetch failed pollStatus(); } }; useEffect(() => { fetchData(); }, [vertical]); const pollStatus = async () => { const apiUrl = import.meta.env.VITE_API_URL || ''; try { addDebugLog(`Polling ${apiUrl}/api/status...`); // Add cache buster const res = await fetch(`${apiUrl}/api/status?t=${Date.now()}`); // Log Headers for debugging const dateHeader = res.headers.get('date'); const serverHeader = res.headers.get('server'); if (res.status === 429) { addDebugLog("⚠️ Status: 429 Too Many Requests (Rate Limited)"); addDebugLog("Waiting 15s before retry..."); setTimeout(pollStatus, 15000); // Backoff for 15s return; } if (res.status === 404) { addDebugLog(`Status: 404 Not Found`); addDebugLog(`Server: ${serverHeader} | Time: ${dateHeader}`); // CHECK IF IT'S THE OLD BACKEND try { const verRes = await fetch(`${apiUrl}/api/version?t=${Date.now()}`); if (verRes.ok) { const verData = await verRes.json(); addDebugLog(`✓ NEW BACKEND DETECTED: ${verData.version}`); addDebugLog("Status endpoint should be available momentarily..."); } else { const catRes = await fetch(`${apiUrl}/api/catalog?t=${Date.now()}`); if (catRes.ok) { addDebugLog("⚠️ DIAGNOSIS: OLD BACKEND DETECTED (v1.0)"); addDebugLog("The server is online but running old code."); } else { addDebugLog("Diagnosis: Server might be completely down."); } } } catch (err) { addDebugLog("Diagnosis Check Failed."); } setStatus({ detail: "Not Found" }); } else if (res.status === 503) { addDebugLog("Status: 503 Service Unavailable (Initializing)"); const data = await res.json(); setStatus(data); } else if (res.ok) { const data = await res.json(); addDebugLog(`Status: 200 OK (Ready: ${data.ready})`); setStatus(data); if (!data.ready) { setTimeout(pollStatus, 1000); return; } else { addDebugLog("System Ready. 
Fetching Data..."); // STOP RELOADING, JUST FETCH DATA fetchData(); return; } } else { addDebugLog(`Status: ${res.status} ${res.statusText}`); } setTimeout(pollStatus, 5000); } catch (e) { console.error("Status poll failed", e); addDebugLog(`Poll Connection Failed: ${e.message}`); setTimeout(pollStatus, 5000); } }; if (loading) return (
Initializing ML Models for {vertical}...
); // Config based on vertical const configs = { fintech: { title: "Fintech Growth Intelligence", icon: , color: "blue", desc: "Predicting funding rounds and valuation shifts before they happen." }, ai_talent: { title: "AI Talent & Capital Prediction", icon: , color: "indigo", desc: "Tracking engineer migration to predict model breakthroughs." }, esg: { title: "ESG Impact & Greenwashing Detector", icon: , color: "emerald", desc: "Quantifying the gap between corporate claims and reality." }, regulatory: { title: "Regulatory Compliance Prediction", icon: , color: "red", desc: "Forecasting enforcement actions and fine probabilities." }, supply_chain: { title: "Supply Chain Resilience Intelligence", icon: , color: "amber", desc: "Predicting disruption risks and recovery timelines." } }; const config = configs[vertical] || configs.fintech; const isError = !data || data.error || !prediction || prediction.error || prediction.detail || !prediction.predictions; return (
{/* Header */}
{config.icon}

{config.title}

{config.desc}

{showFiles && (
Available Datasets
)}
{/* Content Area */} {isError ? (

System Initializing...

{status && status.logs ? (
{status.logs.map((log, i) => (
[{new Date().toLocaleTimeString()}] {log}
))}
) : (

{status && status.detail === "Not Found" ? "Backend is updating to v2.1... (This may take 2-3 mins)" : "Connecting to ML Engine Status Stream..."}

{status && status.detail === "Not Found" && (
)}
{/* Debug Console for Visibility */}
NETWORK DEBUG LOG:
{debugLogs.map((log, i) => (
{log}
))}
)} {status && status.progress !== undefined && (
{status.step} {status.progress}%
)}
) : (
{/* LEFT COLUMN: Active Predictions (8 cols) */}
{/* RIGHT COLUMN: Model Performance (4 cols) */}
{/* Additional Info Card */}

Data Sources

    {Object.keys(prediction.explanation).slice(0, 4).map((key, i) => (
  • {key.replace(/_/g, ' ')}
  • ))}
  • + 28 other signals processed
)}
); }; export default ProductSection;