# NOTE: the three lines that were here ("Spaces:" / "Running" / "Running") were
# Hugging Face Spaces page-header residue from a copy-paste, not Python code;
# converted to this comment so the file parses.
import gradio as gr
import torch
import torch.nn as nn
from torchvision import transforms, models
from PIL import Image

# Inference runs on CPU (no GPU assumed on the hosting tier).
DEVICE = torch.device("cpu")
IMG_SIZE = 224
# Index order matters: logit 0 = 'fake', logit 1 = 'real' (must match training).
classes = ['fake', 'real']

# Rebuild the EfficientNet-B0 architecture without pretrained weights, replace
# the classifier head with a 2-class linear layer, then load the fine-tuned
# state dict. The head replacement must match how the checkpoint was saved.
model = models.efficientnet_b0(weights=None)
in_f = model.classifier[1].in_features
model.classifier = nn.Linear(in_f, 2)
# weights_only=True restricts unpickling to tensors/containers, preventing
# arbitrary code execution from a tampered checkpoint file (and silencing the
# FutureWarning on recent PyTorch). A plain state_dict loads fine this way.
model.load_state_dict(
    torch.load("deepfake_detector.pth", map_location=DEVICE, weights_only=True)
)
model.to(DEVICE)
model.eval()
# Deterministic preprocessing: resize to the network input size, convert to a
# float tensor, and normalize with the standard ImageNet channel statistics.
_IMAGENET_MEAN = [0.485, 0.456, 0.406]
_IMAGENET_STD = [0.229, 0.224, 0.225]
clean_tf = transforms.Compose(
    [
        transforms.Resize((IMG_SIZE, IMG_SIZE)),
        transforms.ToTensor(),
        transforms.Normalize(_IMAGENET_MEAN, _IMAGENET_STD),
    ]
)
def enhanced_predict(img):
    """Classify a PIL image as 'fake' or 'real'.

    Args:
        img: A PIL.Image, or None (Gradio passes None when no image is set).

    Returns:
        Tuple of (confidences, warning_msg):
        - confidences: dict mapping class name -> probability for gr.Label,
          or None when no image was supplied.
        - warning_msg: an HTML status string for the gr.HTML component.
    """
    if img is None:
        return None, "<div class='warning-box'>SYSTEM ALERT: Please upload or capture an image for analysis.</div>"
    # Webcam captures and PNG uploads can be RGBA or grayscale; Normalize is
    # configured for 3 channels, so force RGB first.
    img = img.convert("RGB")
    # clean_tf already resizes to IMG_SIZE, so a separate img.resize() call
    # beforehand was redundant (and resampled the image twice).
    t = clean_tf(img).unsqueeze(0).to(DEVICE)
    # model.eval() was set once at load time; no need to repeat it per call.
    with torch.no_grad():
        out = model(t)
        prob = torch.softmax(out, 1)[0]
    fake_prob = float(prob[0])
    real_prob = float(prob[1])
    confidences = {classes[0]: fake_prob, classes[1]: real_prob}
    max_conf = max(fake_prob, real_prob) * 100
    # Below 60% the network is effectively guessing — flag for manual review.
    if max_conf < 60.0:
        warning_msg = f"<div class='warning-box'>[WARNING] Low Confidence ({max_conf:.1f}%). Network anomaly detected. Manual review advised.</div>"
    else:
        warning_msg = f"<div class='success-box'>[SECURE] High Confidence Prediction ({max_conf:.1f}%)</div>"
    return confidences, warning_msg
# Dark "cyber ops" theme for the Gradio UI: overrides Gradio CSS variables for a
# navy/cyan palette, styles the header card, pill-shaped tab buttons, hoverable
# info cards, the primary action button, the .warning-box / .success-box status
# banners emitted by enhanced_predict(), and the image-component icon buttons.
custom_css = """
:root, .gradio-container, .dark {
    --background-fill-primary: #0b0f19 !important;
    --background-fill-secondary: #0f172a !important;
    --block-background-fill: #1e293b !important;
    --block-border-color: rgba(56, 189, 248, 0.3) !important;
    --body-text-color: #e2e8f0 !important;
    --block-label-text-color: #38bdf8 !important;
    --border-color-primary: rgba(56, 189, 248, 0.3) !important;
}
body {
    background-color: #0b0f19 !important;
    background-image: radial-gradient(circle at 50% 0%, #1e293b 0%, #0b0f19 70%) !important;
    font-family: 'Inter', sans-serif !important;
}
.header-box {
    background: rgba(15, 23, 42, 0.7);
    backdrop-filter: blur(12px);
    padding: 2.5rem;
    border-radius: 12px;
    text-align: center;
    margin-bottom: 2rem;
    border: 1px solid rgba(56, 189, 248, 0.3);
    border-top: 3px solid #38bdf8;
    box-shadow: 0 10px 30px -10px rgba(56, 189, 248, 0.3);
}
.header-box h1 {
    margin-bottom: 0.5rem;
    color: #f8fafc !important;
    font-weight: 900;
    letter-spacing: 2px;
    font-size: 2.5em;
    text-transform: uppercase;
    text-shadow: 0 0 15px rgba(56,189,248,0.5);
}
.header-box p {
    font-size: 1.1rem;
    color: #94a3b8 !important;
    font-weight: 400;
}
.tabs > .tab-nav {
    border: none !important;
    display: flex !important;
    justify-content: center !important;
    gap: 15px !important;
    margin-bottom: 2.5rem !important;
    background: transparent !important;
}
.tabs > .tab-nav > button {
    background: linear-gradient(145deg, #1e293b, #0f172a) !important;
    border: 1px solid rgba(56, 189, 248, 0.4) !important;
    color: #94a3b8 !important;
    padding: 12px 30px !important;
    border-radius: 50px !important;
    font-weight: 700 !important;
    text-transform: uppercase !important;
    letter-spacing: 1px !important;
    box-shadow: 0 4px 10px rgba(0,0,0,0.5) !important;
    transition: all 0.3s ease !important;
}
.tabs > .tab-nav > button:hover {
    color: #38bdf8 !important;
    border-color: #38bdf8 !important;
    box-shadow: 0 0 15px rgba(56,189,248,0.3) !important;
    transform: translateY(-2px);
}
.tabs > .tab-nav > button.selected {
    background: linear-gradient(135deg, #0284c7 0%, #38bdf8 100%) !important;
    color: #ffffff !important;
    border: 1px solid #7dd3fc !important;
    box-shadow: 0 0 20px rgba(56, 189, 248, 0.6), inset 0 2px 5px rgba(255,255,255,0.3) !important;
    text-shadow: 0 2px 4px rgba(0,0,0,0.3) !important;
    transform: scale(1.05);
}
.gradio-container h3 {
    color: #38bdf8 !important;
    font-family: 'Courier New', monospace !important;
    text-transform: uppercase;
    letter-spacing: 1.5px;
    border-bottom: 1px solid rgba(56, 189, 248, 0.3);
    padding-bottom: 8px;
    margin-bottom: 15px;
}
.gradio-container span.text-gray-500 {
    color: #94a3b8 !important;
}
.info-card {
    background: rgba(30, 41, 59, 0.6);
    padding: 1.5rem;
    border-radius: 8px;
    border-left: 4px solid #818cf8;
    margin-bottom: 1.5rem;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.3);
    border-top: 1px solid rgba(255,255,255,0.05);
    border-right: 1px solid rgba(255,255,255,0.05);
    border-bottom: 1px solid rgba(255,255,255,0.05);
    transition: transform 0.3s ease, box-shadow 0.3s ease;
}
.info-card:hover {
    transform: translateY(-5px);
    box-shadow: 0 10px 20px rgba(0, 0, 0, 0.5);
    background: rgba(30, 41, 59, 0.9);
}
.info-card p, .info-card li {
    color: #f1f5f9 !important;
    font-size: 1.05rem;
    line-height: 1.6;
}
button.primary {
    background: linear-gradient(135deg, #2563eb 0%, #3b82f6 100%) !important;
    border: 1px solid #60a5fa !important;
    color: white !important;
    font-weight: 800 !important;
    letter-spacing: 1px !important;
    border-radius: 8px !important;
    text-transform: uppercase !important;
    box-shadow: 0 4px 15px rgba(37, 99, 235, 0.5) !important;
    transition: all 0.3s ease !important;
    margin-top: 10px !important;
}
button.primary:hover {
    transform: translateY(-2px) !important;
    box-shadow: 0 8px 25px rgba(37, 99, 235, 0.7) !important;
    background: linear-gradient(135deg, #3b82f6 0%, #60a5fa 100%) !important;
}
.warning-box { border-left: 4px solid #f59e0b; background: rgba(245, 158, 11, 0.15); padding: 1rem; border-radius: 6px; color: #fcd34d !important; font-weight: 600; font-size: 1.1em;}
.success-box { border-left: 4px solid #10b981; background: rgba(16, 185, 129, 0.15); padding: 1rem; border-radius: 6px; color: #6ee7b7 !important; font-weight: 600; font-size: 1.1em;}
.gradio-container button[aria-label="Capture"],
.gradio-container button[aria-label="Clear"],
.gradio-container .icon-button,
[data-testid="image"] button {
    background: #0284c7 !important;
    color: #ffffff !important;
    border: 2px solid #7dd3fc !important;
    border-radius: 50% !important;
    opacity: 1 !important;
    visibility: visible !important;
    box-shadow: 0 0 15px rgba(56, 189, 248, 0.8) !important;
    transform: scale(1.2) !important;
    transition: all 0.2s ease !important;
}
[data-testid="image"] button:hover {
    background: #38bdf8 !important;
    transform: scale(1.3) !important;
}
[data-testid="image"] button svg {
    stroke: #ffffff !important;
    fill: #ffffff !important;
}
"""
# Build the UI. Note: `theme` and `css` are gr.Blocks() constructor arguments,
# not launch() arguments — Blocks applies them at construction time, and recent
# Gradio versions raise TypeError for unknown launch() keywords. Passing them
# to launch() (as the original did) meant the custom theme/CSS never applied.
with gr.Blocks(theme=gr.themes.Base(), css=custom_css) as demo:
    gr.HTML("""
    <div class="header-box">
        <h1>TrueFace AI: Deepfake Detector</h1>
        <p>Unmasking the truth in digital media using Advanced Neural Networks</p>
    </div>
    """)
    with gr.Tab("Awareness & Education"):
        with gr.Row():
            with gr.Column(scale=1):
                gr.Image("project.jpeg", show_label=False, interactive=False)
                gr.Markdown("<center><i style='color:#94a3b8;'>Conceptual visualization of biometric facial mapping.</i></center>")
            with gr.Column(scale=1):
                gr.HTML("""
                <div class="info-card">
                    <h3 style="border:none; margin-bottom:5px;">The Deepfake Threat</h3>
                    <p>Deepfakes utilize artificial intelligence to create highly realistic, manipulated videos and images. They seamlessly map target faces onto source actors, creating a digital illusion that is increasingly difficult to detect with the human eye.</p>
                </div>
                <div class="info-card">
                    <h3 style="border:none; margin-bottom:5px;">Crimes & Social Impact</h3>
                    <ul style="padding-left: 20px;">
                        <li style="margin-bottom: 8px;"><strong>Identity Theft:</strong> Scammers use face and voice clones to bypass biometric security protocols.</li>
                        <li style="margin-bottom: 8px;"><strong>Political Misinformation:</strong> Fabricated news clips can manipulate elections and public opinion.</li>
                        <li><strong>Reputation Damage:</strong> Innocent individuals are targeted with synthetic media designed for defamation.</li>
                    </ul>
                </div>
                <div class="info-card">
                    <h3 style="border:none; margin-bottom:5px;">Detection & Prevention</h3>
                    <p>Always verify media sources. Look for unnatural blinking, blending errors around the jawline, or digital artifacts. Use advanced CNN-based tools to mathematically analyze pixel inconsistencies and frequency domain anomalies.</p>
                </div>
                """)
    with gr.Tab("Deepfake Detector"):
        with gr.Row():
            with gr.Column(scale=1):
                gr.Markdown("### Upload or Capture Suspect Image")
                input_image = gr.Image(type="pil", sources=["upload", "webcam"], label="Input Media Stream", interactive=True)
                analyze_btn = gr.Button("Initialize Image Analysis", variant="primary", size="lg")
            with gr.Column(scale=1):
                gr.Markdown("### AI Analysis Results")
                output_label = gr.Label(label="Network Probability Distribution", num_top_classes=2)
                output_warning = gr.HTML(label="System Status Module")
    # Wire the button to the classifier: image in, (label dict, status HTML) out.
    analyze_btn.click(fn=enhanced_predict, inputs=input_image, outputs=[output_label, output_warning])

demo.launch()