"""
src/streamlit_with_api.py
Professional Streamlit UI for DeepGuard AI - DeepFake Detection System
Integrated FastAPI Backend - Starts API in background thread
Run with: streamlit run src/streamlit_with_api.py
"""
import streamlit as st
import requests
from io import BytesIO
import tempfile
import os
import sys
import threading
import time
import uvicorn
# Page configuration -- MUST be the first Streamlit command executed.
_PAGE_OPTIONS = {
    "page_title": "DeepGuard AI - DeepFake Detection",
    "page_icon": "đĄī¸",
    "layout": "wide",
    "initial_sidebar_state": "expanded",
}
st.set_page_config(**_PAGE_OPTIONS)
# Make the project root importable regardless of the current working directory.
_this_file = os.path.abspath(__file__)
ROOT_DIR = os.path.dirname(os.path.dirname(_this_file))
if ROOT_DIR not in sys.path:
    # Prepend so project modules win over same-named installed packages.
    sys.path.insert(0, ROOT_DIR)
# Import the FastAPI app from app/main.py; degrade gracefully if unavailable.
try:
    # The backend lives in <root>/app, so that directory must be importable.
    app_dir = os.path.join(ROOT_DIR, "app")
    if app_dir not in sys.path:
        sys.path.insert(0, app_dir)
    import main
    fastapi_app = main.app
    FASTAPI_AVAILABLE = True
except ImportError as e:
    # Surface the failure in the UI; the rest of the page checks this flag.
    st.error(f"FastAPI not available: {e}")
    FASTAPI_AVAILABLE = False
# đĨ HF-Compatible Flow: Start FastAPI in background thread
def start_api():
    """Run the FastAPI app under uvicorn; intended to run in a daemon thread."""
    if not FASTAPI_AVAILABLE:
        return
    try:
        uvicorn.run(
            fastapi_app,
            host="127.0.0.1",  # local-only: the UI proxies through this process
            port=8000,
            log_level="warning",
        )
    except Exception as e:
        # Streamlit widgets are not safe from this thread; log to the console.
        print(f"Error starting FastAPI: {e}")
# Launch the API exactly once per Streamlit session (reruns skip this branch).
if FASTAPI_AVAILABLE and "api_started" not in st.session_state:
    threading.Thread(target=start_api, daemon=True).start()
    st.session_state.api_started = True
    time.sleep(2)  # crude wait so uvicorn can bind the port before first request
# Custom CSS for professional look
# NOTE(review): the CSS payload below is empty -- the original <style> markup
# appears to have been stripped; confirm against version control.
st.markdown("""
""", unsafe_allow_html=True)
# Header
# NOTE(review): the header HTML is likewise empty -- confirm intended markup.
st.markdown("""
""", unsafe_allow_html=True)
# Sidebar: model status, API endpoint info, supported formats, and about text.
with st.sidebar:
    st.markdown("### âī¸ Configuration")
    # API Status -- reflects whether the background FastAPI thread was started.
    st.markdown("### đ Model Status")
    if FASTAPI_AVAILABLE and "api_started" in st.session_state:
        st.markdown('FastAPI: Running (127.0.0.1:8000)', unsafe_allow_html=True)
        st.markdown('Video Model: Optimized', unsafe_allow_html=True)
        st.markdown('Image Model: Disabled', unsafe_allow_html=True)
    else:
        st.markdown('FastAPI: Not Available', unsafe_allow_html=True)
    st.markdown("---")
    # FIX: this note had been mangled into a bare non-comment line ("API URL -
    # Same as requested"), which is a syntax error; restored as a comment.
    # API URL - Same as requested
    API_URL = "http://127.0.0.1:8000/predict"
    # Shown read-only so users can see where requests go.
    st.text_input(
        "API Endpoint",
        value=API_URL,
        disabled=True,
        help="FastAPI running internally on port 8000"
    )
    st.markdown("---")
    st.markdown("### đ Supported Formats")
    st.markdown("""
**Images:**
- JPG, JPEG, PNG
**Videos:**
- MP4, AVI, MOV, MKV
""")
    st.markdown("---")
    st.markdown("### âšī¸ About")
    st.markdown("""
DeepGuard AI uses advanced deep learning
models to detect deepfake content in
videos with high accuracy.
**Memory Optimized Version:**
- â Image processing disabled
- â Video processing only
- đ 50-70% less memory usage
""")
# Main content area
# Intro banner describing the upload-and-analyze workflow.
st.markdown("""
đ How It Works
Upload a video file to analyze. Our AI-powered system will detect if the content is real or a deepfake using state-of-the-art neural networks. This version is optimized for memory efficiency with video-only processing.
""", unsafe_allow_html=True)
# Upload area: file picker on the left, feature bullet list on the right.
st.markdown("### đ¤ Upload Video for Analysis")
col1, col2 = st.columns([1, 1])
with col1:
    # Only video extensions are accepted; image handling is disabled in this build.
    uploaded_file = st.file_uploader(
        "Choose a video file",
        type=["mp4", "avi", "mov", "mkv", "flv", "wmv", "webm"],
        help="Upload a video file to detect deepfakes (Image processing disabled)",
        label_visibility="collapsed",
    )
with col2:
    st.markdown("""
⨠Features
- Video DeepFake Detection
- Memory Optimized
- Real-time Analysis
- 50-70% Less Memory Usage
- Fast Processing
""", unsafe_allow_html=True)
# Main flow: show the uploaded file, POST it to the API, and render results.
if uploaded_file is not None:
    st.markdown("---")
    # Always video (the uploader only accepts video extensions).
    file_extension = uploaded_file.name.split('.')[-1].lower()
    col1, col2 = st.columns([2, 1])
    with col1:
        st.markdown("### đš Uploaded Video")
        st.info(f"Video file: {uploaded_file.name} ({uploaded_file.size / (1024*1024):.2f} MB)")
        st.markdown("**Note:** Video processing optimized for memory efficiency.")
    with col2:
        st.markdown("### đ File Information")
        st.metric("File Name", uploaded_file.name)
        st.metric("File Size", f"{uploaded_file.size / (1024*1024):.2f} MB")
        st.metric("File Type", "Video")
        st.metric("Processing", "Optimized")
    # Analyze button, centered via the middle column.
    st.markdown("---")
    col1, col2, col3 = st.columns([1, 2, 1])
    with col2:
        analyze_button = st.button(
            "đ Analyze Video for DeepFake",
            use_container_width=True,
            type="primary"
        )
    if analyze_button:
        # Persist the upload to disk so it can be streamed to the API.
        with tempfile.NamedTemporaryFile(delete=False, suffix=f".{file_extension}") as tmp_file:
            tmp_file.write(uploaded_file.getvalue())
            tmp_path = tmp_file.name
        try:
            # Fall back to a generic video MIME type if Streamlit gave us none.
            mime_type = uploaded_file.type if hasattr(uploaded_file, 'type') else f"video/{file_extension}"
            # FIX: open the payload in a context manager. The original called
            # open() bare and close()d it only on the success path, leaking the
            # file handle whenever requests.post raised (timeout, connection
            # error).
            with open(tmp_path, "rb") as payload:
                files = {"file": (uploaded_file.name, payload, mime_type)}
                with st.spinner("đ Analyzing video... Memory optimized processing..."):
                    response = requests.post(API_URL, files=files, timeout=180)  # Longer timeout for videos
            if response.status_code == 200:
                result = response.json()
                # Display results in a nice format.
                st.markdown("---")
                st.markdown("### đ Analysis Results")
                prediction = result.get("prediction", "unknown")
                probabilities = result.get("probabilities", [[0, 0]])
                # Color coding for the verdict.
                if prediction.lower() == "fake":
                    prediction_color = "đ´"
                    prediction_bg = "#ffebee"
                else:
                    prediction_color = "đĸ"
                    prediction_bg = "#e8f5e9"
                col1, col2 = st.columns([1, 1])
                with col1:
                    st.markdown(f"""
{prediction_color} Prediction
{prediction}
""", unsafe_allow_html=True)
                with col2:
                    # Probability display: expects [[real, fake]] in 0..1.
                    if len(probabilities) > 0 and len(probabilities[0]) >= 2:
                        real_prob = probabilities[0][0] * 100
                        fake_prob = probabilities[0][1] * 100
                        st.markdown("### đ Confidence Scores")
                        st.progress(real_prob / 100, text=f"Real: {real_prob:.2f}%")
                        st.progress(fake_prob / 100, text=f"Fake: {fake_prob:.2f}%")
                # Raw API payload for power users.
                with st.expander("đ View Detailed Results"):
                    st.json(result)
                st.success("â Analysis completed successfully!")
            else:
                st.error(f"â API Error {response.status_code}: {response.text}")
        except requests.exceptions.Timeout:
            st.error("âąī¸ Request timed out. The file might be too large or the server is busy.")
        except requests.exceptions.ConnectionError:
            st.error("đ Connection error. Please make sure the API server is running.")
        except Exception as e:
            st.error(f"â Error: {str(e)}")
        finally:
            # Clean up temp file. FIX: narrowed the bare except to OSError so
            # unrelated bugs (e.g. NameError) are not silently swallowed.
            try:
                os.unlink(tmp_path)
            except OSError:
                pass
else:
    # Placeholder when no file is uploaded yet.
    st.markdown("""
đ Upload a file to get started
Select an image or video file from your device to analyze for deepfake content
""", unsafe_allow_html=True)
# Footer
# NOTE(review): the footer HTML is empty -- the markup appears to have been
# stripped; confirm against version control.
st.markdown("""
""", unsafe_allow_html=True)