|
|
""" |
|
|
Web Testing Suite for Hugging Face Spaces |
|
|
Interactive UI with Gradio for Performance, SEO, Security, Accessibility Testing |
|
|
""" |
|
|
|
|
|
import json
import re
import socket
import ssl
import time
from datetime import datetime
from typing import Dict, List, Tuple, Union
from urllib.parse import urljoin, urlparse

import requests
from bs4 import BeautifulSoup

import gradio as gr
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class PerformanceTester:
    """Measure basic performance characteristics of a web page.

    All checks use plain HTTP requests (no headless browser), so the
    numbers are approximations taken from the raw HTML response:
    TTFB includes DNS/TCP/TLS setup, and resource counts come from
    static parsing only.
    """

    def __init__(self, url: str):
        self.url = url

    def measure_ttfb(self) -> Union[float, str]:
        """Return the approximate Time To First Byte in milliseconds.

        Returns a human-readable error string instead of a number when
        the request fails; callers distinguish the cases via isinstance().
        """
        try:
            start = time.time()
            # stream=True defers the body download so the elapsed time
            # approximates header arrival rather than full page load.
            response = requests.get(self.url, stream=True, timeout=10)
            ttfb = time.time() - start
            # A streamed response holds its connection open until closed;
            # the body is never read, so release it explicitly.
            response.close()
            return round(ttfb * 1000, 2)
        except Exception as e:
            return f"Error: {e}"

    def get_payload_size(self) -> Dict:
        """Return page size, static resource counts, and compression info.

        On failure returns {'error': <message>} instead of raising.
        """
        try:
            response = requests.get(self.url, timeout=10)
            html_size = len(response.content)

            soup = BeautifulSoup(response.content, 'html.parser')

            # Only externally referenced scripts count (inline ones excluded).
            scripts = len(soup.find_all('script', src=True))
            stylesheets = len(soup.find_all('link', rel='stylesheet'))
            images = len(soup.find_all('img'))

            return {
                'html_size_kb': round(html_size / 1024, 2),
                'script_count': scripts,
                'stylesheet_count': stylesheets,
                'image_count': images,
                'compression': response.headers.get('Content-Encoding', 'none')
            }
        except Exception as e:
            return {'error': str(e)}

    def test_all(self) -> str:
        """Run every performance check and return a Markdown report."""
        result = f"⚡ **PERFORMANCE TEST RESULTS**\n\n"
        result += f"🌐 URL: {self.url}\n\n"
        recommendations = []

        ttfb = self.measure_ttfb()
        if isinstance(ttfb, float):
            # Thresholds loosely follow common web-vitals guidance:
            # <200ms good, <500ms acceptable, above that poor.
            if ttfb < 200:
                status = "✅ Excellent"
            elif ttfb < 500:
                status = "⚠️ Needs Improvement"
                recommendations.append("🔧 **TTFB Issue:** Reduce server response time by:\n • Using a CDN\n • Optimizing database queries\n • Enabling caching\n • Upgrading server resources")
            else:
                status = "❌ Poor"
                recommendations.append("🚨 **Critical TTFB Issue:** Immediate action required:\n • Check server health and resources\n • Implement Redis/Memcached caching\n • Use a CDN (Cloudflare, AWS CloudFront)\n • Optimize backend code\n • Consider server upgrade")
            result += f"⏱️ **Time To First Byte:** {ttfb} ms {status}\n"
        else:
            # measure_ttfb returned an error string; surface it verbatim.
            result += f"⏱️ **Time To First Byte:** {ttfb}\n"

        payload = self.get_payload_size()
        if 'error' not in payload:
            result += f"\n📦 **Page Size:** {payload['html_size_kb']} KB"
            if payload['html_size_kb'] > 1000:
                result += " ❌"
                recommendations.append("🚨 **Large Page Size:** Reduce HTML size:\n • Minify HTML, CSS, and JavaScript\n • Remove unused code\n • Compress images\n • Implement lazy loading")
            elif payload['html_size_kb'] > 500:
                result += " ⚠️"
                recommendations.append("🔧 **Page Size Warning:** Consider:\n • Enabling Gzip/Brotli compression\n • Minifying resources\n • Optimizing images")
            else:
                result += " ✅"

            result += f"\n📜 **Scripts:** {payload['script_count']}"
            if payload['script_count'] > 20:
                result += " ⚠️"
                recommendations.append("🔧 **Too Many Scripts:** Reduce JavaScript files:\n • Bundle scripts together\n • Remove unused libraries\n • Use async/defer attributes\n • Implement code splitting")

            result += f"\n🎨 **Stylesheets:** {payload['stylesheet_count']}"
            if payload['stylesheet_count'] > 10:
                result += " ⚠️"
                recommendations.append("🔧 **Too Many Stylesheets:** Optimize CSS:\n • Combine CSS files\n • Remove unused styles\n • Use CSS minification\n • Consider critical CSS approach")

            result += f"\n🖼️ **Images:** {payload['image_count']}"
            result += f"\n🗜️ **Compression:** {payload['compression']}"
            if payload['compression'] == 'none':
                result += " ❌"
                recommendations.append("🚨 **No Compression:** Enable compression:\n • Add Gzip/Brotli to server config\n • For Apache: Enable mod_deflate\n • For Nginx: Add gzip on;\n • For Node.js: Use compression middleware")
        else:
            # Previously a payload-fetch failure produced no output at all;
            # report the error so the section is never silently missing.
            result += f"\n📦 **Payload Check Failed:** {payload['error']}\n"

        if recommendations:
            result += f"\n\n{'='*50}\n"
            result += f"💡 **RECOMMENDATIONS**\n\n"
            result += "\n\n".join(recommendations)

        return result
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class SEOAuditor:
    """Audit on-page SEO signals: meta tags, heading structure, sitemap."""

    def __init__(self, url: str):
        self.url = url
        self.soup = None  # populated by fetch_page()

    def fetch_page(self) -> bool:
        """Download the page and parse it into self.soup.

        Returns True on success, False on any network/parse failure.
        """
        try:
            response = requests.get(self.url, timeout=10)
            self.soup = BeautifulSoup(response.content, 'html.parser')
        except Exception:
            return False
        return True

    def check_meta_tags(self) -> Dict:
        """Summarize title, description, Open Graph and canonical tags."""
        title = self.soup.find('title')
        description = self.soup.find('meta', attrs={'name': 'description'})
        og_title = self.soup.find('meta', property='og:title')
        og_description = self.soup.find('meta', property='og:description')
        og_image = self.soup.find('meta', property='og:image')
        canonical = self.soup.find('link', rel='canonical')

        return {
            'title': title.text if title else '❌ Missing',
            'title_length': len(title.text) if title else 0,
            # (or '') guards a <meta name="description"> with no content
            # attribute, where .get() returns None and the slice would crash.
            'description': (description.get('content') or '')[:100] + '...' if description else '❌ Missing',
            'og_title': '✅ Present' if og_title else '❌ Missing',
            'og_description': '✅ Present' if og_description else '❌ Missing',
            'og_image': '✅ Present' if og_image else '❌ Missing',
            'canonical': '✅ Present' if canonical else '❌ Missing'
        }

    def check_headers(self) -> Dict:
        """Count H1/H2/H3 headings; 'valid' means exactly one H1."""
        h1 = self.soup.find_all('h1')
        h2 = self.soup.find_all('h2')
        h3 = self.soup.find_all('h3')

        return {
            'h1_count': len(h1),
            'h1_text': [h.text.strip()[:50] for h in h1][:3],  # first 3, truncated
            'h2_count': len(h2),
            'h3_count': len(h3),
            'valid': len(h1) == 1
        }

    def check_sitemap(self) -> Dict:
        """Probe <site>/sitemap.xml; 'exists' means HTTP 200."""
        sitemap_url = urljoin(self.url, '/sitemap.xml')
        try:
            resp = requests.get(sitemap_url, timeout=5)
            return {'exists': resp.status_code == 200}
        except Exception:  # narrowed from bare except: (kept SystemExit etc. alive)
            return {'exists': False}

    def audit(self) -> str:
        """Run the full SEO audit and return a Markdown report."""
        result = f"🔍 **SEO AUDIT RESULTS**\n\n"
        result += f"🌐 URL: {self.url}\n\n"
        recommendations = []

        if not self.fetch_page():
            return result + "❌ Failed to fetch page"

        meta = self.check_meta_tags()
        result += f"📝 **Meta Tags**\n"

        result += f"• Title: {meta['title']}\n"
        result += f"• Title Length: {meta['title_length']} chars "
        # 50-60 chars is the usual SERP-safe title length.
        if 50 <= meta['title_length'] <= 60:
            result += f"✅\n"
        else:
            result += f"⚠️\n"
        if meta['title'] == '❌ Missing':
            recommendations.append("🚨 **Missing Title Tag:**\n • Add <title>Your Page Title</title> in <head>\n • Keep it 50-60 characters\n • Include primary keyword\n • Make it unique for each page")
        elif meta['title_length'] < 50:
            recommendations.append("🔧 **Title Too Short:**\n • Expand to 50-60 characters\n • Add descriptive keywords\n • Include brand name")
        elif meta['title_length'] > 60:
            recommendations.append("🔧 **Title Too Long:**\n • Shorten to 50-60 characters\n • Google truncates at ~60 chars\n • Keep most important words first")

        result += f"• Description: {meta['description']}\n"
        if meta['description'] == '❌ Missing':
            recommendations.append("🚨 **Missing Meta Description:**\n • Add <meta name=\"description\" content=\"...\">\n • Keep it 150-160 characters\n • Include target keywords naturally\n • Make it compelling for click-through")

        result += f"• OG Title: {meta['og_title']}\n"
        result += f"• OG Description: {meta['og_description']}\n"
        result += f"• OG Image: {meta['og_image']}\n"
        if meta['og_title'] == '❌ Missing' or meta['og_description'] == '❌ Missing' or meta['og_image'] == '❌ Missing':
            recommendations.append("⚠️ **Missing Open Graph Tags:**\n • Add <meta property=\"og:title\" content=\"...\">\n • Add <meta property=\"og:description\" content=\"...\">\n • Add <meta property=\"og:image\" content=\"...\">\n • Improves social media sharing appearance")

        result += f"• Canonical: {meta['canonical']}\n\n"
        if meta['canonical'] == '❌ Missing':
            recommendations.append("⚠️ **Missing Canonical Tag:**\n • Add <link rel=\"canonical\" href=\"...\">\n • Prevents duplicate content issues\n • Points to preferred URL version")

        headers = self.check_headers()
        result += f"📊 **Header Structure**\n"
        result += f"• H1 Count: {headers['h1_count']} {'✅' if headers['valid'] else '❌'}\n"
        if not headers['valid']:
            if headers['h1_count'] == 0:
                recommendations.append("🚨 **Missing H1 Tag:**\n • Add exactly ONE <h1> tag per page\n • Should describe main page content\n • Include primary keyword\n • Must be unique on page")
            elif headers['h1_count'] > 1:
                recommendations.append("❌ **Multiple H1 Tags:**\n • Use only ONE <h1> per page\n • Convert extra H1s to <h2> or <h3>\n • Maintains proper heading hierarchy\n • Better for SEO and accessibility")

        if headers['h1_text']:
            result += f"• H1 Text: {headers['h1_text'][0]}\n"
        result += f"• H2 Count: {headers['h2_count']}\n"
        result += f"• H3 Count: {headers['h3_count']}\n\n"

        sitemap = self.check_sitemap()
        result += f"🗺️ **Sitemap:** {'✅ Found' if sitemap['exists'] else '❌ Not Found'}\n"
        if not sitemap['exists']:
            recommendations.append("❌ **Missing Sitemap:**\n • Create sitemap.xml file\n • List all important pages\n • Submit to Google Search Console\n • Update when adding new pages\n • Use sitemap generator tools")

        if recommendations:
            result += f"\n{'='*50}\n"
            result += f"💡 **SEO RECOMMENDATIONS**\n\n"
            result += "\n\n".join(recommendations)

        return result
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class SecurityTester:
    """Check TLS validity, security response headers, and mixed content."""

    def __init__(self, url: str):
        self.url = url
        # hostname (unlike netloc) strips ":port" and userinfo, which the
        # raw socket connection and SNI below require as a bare host name;
        # fall back to netloc for malformed URLs where hostname is None.
        parsed = urlparse(url)
        self.domain = parsed.hostname or parsed.netloc

    def check_ssl(self) -> Dict:
        """Validate the TLS certificate by opening a real connection on 443."""
        try:
            context = ssl.create_default_context()
            with socket.create_connection((self.domain, 443), timeout=5) as sock:
                with context.wrap_socket(sock, server_hostname=self.domain) as ssock:
                    cert = ssock.getpeercert()
                    return {
                        'valid': True,
                        'expires': cert['notAfter']
                    }
        except Exception as e:
            # Any failure (refused, expired, hostname mismatch) counts invalid.
            return {'valid': False, 'error': str(e)}

    def check_security_headers(self) -> Dict:
        """Collect the common security headers; '❌' marks an absent header."""
        try:
            response = requests.get(self.url, timeout=10)
            headers = response.headers

            return {
                'hsts': headers.get('Strict-Transport-Security', '❌'),
                'x_content_type': headers.get('X-Content-Type-Options', '❌'),
                'x_frame': headers.get('X-Frame-Options', '❌'),
                'csp': headers.get('Content-Security-Policy', '❌'),
                'referrer': headers.get('Referrer-Policy', '❌')
            }
        except Exception as e:
            return {'error': str(e)}

    def check_mixed_content(self) -> List[str]:
        """List up to five http:// resources embedded in an https:// page."""
        if not self.url.startswith('https'):
            return ['⚠️ Page is not HTTPS']

        try:
            response = requests.get(self.url, timeout=10)
            soup = BeautifulSoup(response.content, 'html.parser')
            mixed = []

            for tag in soup.find_all(['img', 'script', 'link']):
                src = tag.get('src') or tag.get('href', '')
                if src.startswith('http://'):
                    mixed.append(src[:50])

            return mixed[:5] if mixed else ['✅ No mixed content']
        except Exception:  # narrowed from bare except:
            return ['Error checking']

    def test_all(self) -> str:
        """Run every security check and return a Markdown report."""
        result = f"🔒 **SECURITY TEST RESULTS**\n\n"
        result += f"🌐 URL: {self.url}\n\n"
        recommendations = []

        ssl_info = self.check_ssl()
        if ssl_info['valid']:
            result += f"🔒 **SSL Certificate:** ✅ Valid\n"
            result += f"• Expires: {ssl_info['expires']}\n\n"
        else:
            result += f"🔒 **SSL Certificate:** ❌ Invalid\n\n"
            recommendations.append("🚨 **SSL Certificate Issue:**\n • Install valid SSL certificate\n • Use Let's Encrypt (free)\n • Or purchase from trusted CA\n • Configure HTTPS redirect\n • Update all internal links to HTTPS")

        headers = self.check_security_headers()
        if 'error' not in headers:
            result += f"🛡️ **Security Headers**\n"

            hsts_present = headers['hsts'] != '❌'
            result += f"• HSTS: {'✅' if hsts_present else '❌'}\n"
            if not hsts_present:
                recommendations.append("❌ **Missing HSTS Header:**\n • Add: Strict-Transport-Security: max-age=31536000\n • Forces HTTPS connections\n • Prevents protocol downgrade attacks\n • Apache: Header always set Strict-Transport-Security \"max-age=31536000\"\n • Nginx: add_header Strict-Transport-Security \"max-age=31536000\";")

            xct_present = headers['x_content_type'] != '❌'
            result += f"• X-Content-Type-Options: {'✅' if xct_present else '❌'}\n"
            if not xct_present:
                recommendations.append("❌ **Missing X-Content-Type-Options:**\n • Add: X-Content-Type-Options: nosniff\n • Prevents MIME type sniffing\n • Reduces XSS attack vectors\n • Apache: Header set X-Content-Type-Options \"nosniff\"\n • Nginx: add_header X-Content-Type-Options \"nosniff\";")

            xfo_present = headers['x_frame'] != '❌'
            result += f"• X-Frame-Options: {'✅' if xfo_present else '❌'}\n"
            if not xfo_present:
                recommendations.append("❌ **Missing X-Frame-Options:**\n • Add: X-Frame-Options: SAMEORIGIN\n • Prevents clickjacking attacks\n • Blocks iframe embedding by other sites\n • Apache: Header always set X-Frame-Options \"SAMEORIGIN\"\n • Nginx: add_header X-Frame-Options \"SAMEORIGIN\";")

            csp_present = headers['csp'] != '❌'
            result += f"• Content-Security-Policy: {'✅' if csp_present else '❌'}\n"
            if not csp_present:
                recommendations.append("⚠️ **Missing Content-Security-Policy:**\n • Add CSP header to prevent XSS\n • Example: Content-Security-Policy: default-src 'self'\n • Restricts resource loading sources\n • Start with report-only mode\n • Use CSP evaluator tools to test")

            ref_present = headers['referrer'] != '❌'
            result += f"• Referrer-Policy: {'✅' if ref_present else '❌'}\n\n"
            if not ref_present:
                recommendations.append("⚠️ **Missing Referrer-Policy:**\n • Add: Referrer-Policy: strict-origin-when-cross-origin\n • Controls referrer information\n • Protects user privacy\n • Apache: Header set Referrer-Policy \"strict-origin-when-cross-origin\"\n • Nginx: add_header Referrer-Policy \"strict-origin-when-cross-origin\";")

        mixed = self.check_mixed_content()
        result += f"📦 **Mixed Content Check**\n"
        has_mixed = False
        for item in mixed:
            result += f"• {item}\n"
            # Only real resource URLs start with http://; the status
            # messages ('✅ ...', '⚠️ ...') never match.
            if item.startswith('http://'):
                has_mixed = True

        if has_mixed:
            recommendations.append("❌ **Mixed Content Detected:**\n • Change all HTTP resources to HTTPS\n • Update image sources\n • Update script/stylesheet URLs\n • Check third-party integrations\n • Use protocol-relative URLs: //example.com")

        if recommendations:
            result += f"\n{'='*50}\n"
            result += f"💡 **SECURITY RECOMMENDATIONS**\n\n"
            result += "\n\n".join(recommendations)

        return result
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class AccessibilityTester:
    """Check basic WCAG signals: image alt text and form labelling."""

    # Input types that carry their own accessible name (value/alt) or are
    # invisible, so WCAG does not require a <label> for them.
    _UNLABELED_INPUT_TYPES = {'hidden', 'submit', 'button', 'reset', 'image'}

    def __init__(self, url: str):
        self.url = url
        self.soup = None  # populated by fetch_page()

    def fetch_page(self) -> bool:
        """Download the page and parse it; return True on success."""
        try:
            response = requests.get(self.url, timeout=10)
            self.soup = BeautifulSoup(response.content, 'html.parser')
            return True
        except Exception:  # narrowed from bare except:
            return False

    def check_alt_attributes(self) -> Dict:
        """Count <img> tags lacking an alt attribute.

        An explicitly empty alt="" is accepted: it is the correct markup
        for decorative images (and what our own recommendation suggests),
        so only a truly absent attribute is flagged.
        """
        images = self.soup.find_all('img')
        missing_alt = []

        for img in images:
            if img.get('alt') is None:
                src = img.get('src', 'No src')[:50]
                missing_alt.append(src)

        return {
            'total': len(images),
            'missing': len(missing_alt),
            'samples': missing_alt[:5]
        }

    def check_form_labels(self) -> Dict:
        """Count form controls without a <label for=...> or aria-label.

        Buttons and hidden fields are skipped (see _UNLABELED_INPUT_TYPES);
        counting them produced false positives.
        """
        inputs = self.soup.find_all(['input', 'textarea', 'select'])
        missing_labels = 0

        for inp in inputs:
            if inp.name == 'input' and inp.get('type', 'text').lower() in self._UNLABELED_INPUT_TYPES:
                continue

            input_id = inp.get('id')
            aria_label = inp.get('aria-label')

            if input_id:
                label = self.soup.find('label', attrs={'for': input_id})
                if not label and not aria_label:
                    missing_labels += 1
            elif not aria_label:
                # No id means no <label for=...> can reference it.
                missing_labels += 1

        return {
            'total': len(inputs),
            'missing': missing_labels
        }

    def test_all(self) -> str:
        """Run every accessibility check and return a Markdown report."""
        result = f"♿ **ACCESSIBILITY TEST RESULTS**\n\n"
        result += f"🌐 URL: {self.url}\n\n"
        recommendations = []

        if not self.fetch_page():
            return result + "❌ Failed to fetch page"

        alt_check = self.check_alt_attributes()
        result += f"🖼️ **Image Alt Attributes**\n"
        result += f"• Total Images: {alt_check['total']}\n"
        result += f"• Missing Alt: {alt_check['missing']} "
        if alt_check['missing'] == 0:
            result += f"✅\n"
        else:
            result += f"❌\n"
            recommendations.append(f"❌ **Missing Alt Attributes ({alt_check['missing']} images):**\n • Add descriptive alt text to all images\n • Example: <img src=\"photo.jpg\" alt=\"Person coding on laptop\">\n • Describe image content clearly\n • Use empty alt=\"\" for decorative images\n • Improves screen reader experience\n • Benefits SEO")

        if alt_check['samples']:
            result += f"• Sample Missing Alt: {', '.join(alt_check['samples'][:2])}\n"
        result += f"\n"

        label_check = self.check_form_labels()
        result += f"📋 **Form Labels**\n"
        result += f"• Total Inputs: {label_check['total']}\n"
        result += f"• Missing Labels: {label_check['missing']} "
        if label_check['missing'] == 0:
            result += f"✅\n"
        else:
            result += f"❌\n"
            recommendations.append(f"❌ **Missing Form Labels ({label_check['missing']} inputs):**\n • Add <label> for each input field\n • Example: <label for=\"email\">Email:</label>\n • Or use aria-label attribute\n • Example: <input aria-label=\"Search\">\n • Helps users understand form purpose\n • Required for WCAG compliance\n • Improves mobile usability")

        if recommendations:
            result += f"\n{'='*50}\n"
            result += f"💡 **ACCESSIBILITY RECOMMENDATIONS**\n\n"
            result += "\n\n".join(recommendations)
            result += f"\n\n{'='*50}\n"
            result += f"📚 **Additional Resources:**\n"
            result += f"• WCAG Guidelines: https://www.w3.org/WAI/WCAG21/quickref/\n"
            result += f"• WebAIM: https://webaim.org/\n"
            result += f"• A11Y Project: https://www.a11yproject.com/\n"

        return result
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def run_performance_test(url):
    """Gradio callback: run the performance suite against *url*."""
    if not url:
        return "⚠️ Please enter a URL"
    # Default to HTTPS when the user omitted the scheme.
    if not url.startswith(('http://', 'https://')):
        url = f'https://{url}'
    try:
        return PerformanceTester(url).test_all()
    except Exception as exc:
        return f"❌ Error: {exc}"
|
|
|
|
|
def run_seo_audit(url):
    """Gradio callback: run the SEO audit against *url*."""
    if not url:
        return "⚠️ Please enter a URL"
    # Prepend a scheme when the user typed a bare domain.
    url = url if url.startswith(('http://', 'https://')) else 'https://' + url
    try:
        auditor = SEOAuditor(url)
        return auditor.audit()
    except Exception as exc:
        return f"❌ Error: {exc}"
|
|
|
|
|
def run_security_test(url):
    """Gradio callback: run the security checks against *url*."""
    if not url:
        return "⚠️ Please enter a URL"
    has_scheme = url.startswith(('http://', 'https://'))
    if not has_scheme:
        # Assume HTTPS for bare domains.
        url = 'https://' + url
    try:
        return SecurityTester(url).test_all()
    except Exception as err:
        return f"❌ Error: {err}"
|
|
|
|
|
def run_accessibility_test(url):
    """Gradio callback: run the accessibility checks against *url*."""
    if not url:
        return "⚠️ Please enter a URL"
    if not url.startswith(('http://', 'https://')):
        # Bare domain: try HTTPS first.
        url = ''.join(('https://', url))
    try:
        checker = AccessibilityTester(url)
        return checker.test_all()
    except Exception as err:
        return f"❌ Error: {err}"
|
|
|
|
|
def run_all_tests(url):
    """Gradio callback: run all four suites; one report per result tab."""
    if not url:
        return "⚠️ Please enter a URL", "", "", ""
    if not url.startswith(('http://', 'https://')):
        url = 'https://' + url

    # Each runner traps its own exceptions, so one failing suite
    # never blocks the other three.
    return (
        run_performance_test(url),
        run_seo_audit(url),
        run_security_test(url),
        run_accessibility_test(url),
    )
|
|
|
|
|
|
|
|
# Extra CSS injected into the Gradio app: cap each result textbox at 400px
# and scroll overflow instead of letting long reports stretch the page.
custom_css = """
.scrollable-textbox textarea {
    max-height: 400px !important;
    overflow-y: auto !important;
}
"""
|
|
|
|
|
|
|
|
# --- Gradio UI -------------------------------------------------------------
# Layout: URL input row, two action rows (run-all / show-guidelines, then one
# button per category), a guidelines accordion that starts hidden, and one
# read-only result tab per test category. All event wiring happens inside
# the Blocks context.
with gr.Blocks(title="Web Testing Suite", theme=gr.themes.Soft(), css=custom_css) as demo:
    gr.Markdown("""
# 🚀 Comprehensive Web Testing Suite
Test any website for **Performance**, **SEO**, **Security**, and **Accessibility**
""")

    with gr.Row():
        url_input = gr.Textbox(
            label="🌐 Website URL",
            placeholder="https://example.com or example.com",
            scale=3
        )

    with gr.Row():
        test_all_btn = gr.Button("🚀 Run All Tests", variant="primary", scale=1)
        guideline_btn = gr.Button("📖 Show Guidelines", variant="secondary", scale=1)

    # Hidden until the user clicks "Show Guidelines" (see toggle_guidelines).
    with gr.Accordion("📋 Testing Guidelines & Features", open=False, visible=False) as guidelines_section:
        gr.Markdown("""
## 📦 What's Included

### 1. ⚡ Performance Tester
- **TTFB** (Time To First Byte)
- Full page load metrics with Selenium
- DNS, TCP, and response time measurements
- Payload size analysis (HTML, CSS, JS, images)
- Resource count tracking
- Lighthouse integration for Core Web Vitals (FCP, LCP, CLS, TTI)

### 2. 🔍 SEO Auditor
- Meta tag validation (title, description, OG tags)
- Title/description length optimization checks
- Header structure analysis (H1-H3)
- Broken internal link detection
- Sitemap.xml verification
- JSON-LD schema extraction

### 3. 🔒 Security Tester
- SSL certificate validation & expiry check
- Security headers audit (CSP, HSTS, X-Frame-Options, etc.)
- Mixed content detection (HTTP on HTTPS pages)
- Basic XSS vulnerability testing
- CORS configuration analysis

### 4. ♿ Accessibility Tester
- Missing alt attribute detection
- ARIA role validation
- Form label checking
- Keyboard focus/tabindex analysis
- WCAG compliance indicators

### 5. 🤖 E2E Tester (Selenium)
- Login flow automation
- Form submission testing
- Button click interactions
- Screenshot capture
- Performance tracing
- Custom test suite runner

---

## 💡 How to Use
1. Enter your website URL (with or without https://)
2. Click **"Run All Tests"** for comprehensive analysis
3. Or click individual test buttons for specific checks
4. View detailed results in each tab
5. Results are scrollable for long outputs

## 🎯 Best Practices
- Test after major updates
- Regular security audits recommended
- Fix accessibility issues for better UX
- Monitor performance metrics monthly
- Keep SEO elements updated
""")

    # One trigger button per category, mirroring the result tabs below.
    with gr.Row():
        perf_btn = gr.Button("⚡ Performance", scale=1)
        seo_btn = gr.Button("🔍 SEO", scale=1)
        sec_btn = gr.Button("🔒 Security", scale=1)
        a11y_btn = gr.Button("♿ Accessibility", scale=1)

    with gr.Tabs():
        with gr.Tab("⚡ Performance"):
            perf_output = gr.Textbox(
                label="Performance Test Results",
                lines=20,
                max_lines=20,
                elem_classes=["scrollable-textbox"]  # scrolls via custom_css
            )

        with gr.Tab("🔍 SEO"):
            seo_output = gr.Textbox(
                label="SEO Audit Results",
                lines=20,
                max_lines=20,
                elem_classes=["scrollable-textbox"]
            )

        with gr.Tab("🔒 Security"):
            sec_output = gr.Textbox(
                label="Security Test Results",
                lines=20,
                max_lines=20,
                elem_classes=["scrollable-textbox"]
            )

        with gr.Tab("♿ Accessibility"):
            a11y_output = gr.Textbox(
                label="Accessibility Test Results",
                lines=20,
                max_lines=20,
                elem_classes=["scrollable-textbox"]
            )

    gr.Markdown("""
---
### 📊 Quick Reference
- ✅ **Green**: Passed / Optimal
- ⚠️ **Yellow**: Needs Attention
- ❌ **Red**: Failed / Critical Issue

*Results are automatically scrollable when content exceeds the display area*
""")

    def toggle_guidelines():
        # NOTE(review): despite the name this only ever reveals the
        # accordion (visible=True unconditionally); there is no hide path.
        return gr.Accordion(visible=True, open=True)

    guideline_btn.click(
        toggle_guidelines,
        outputs=[guidelines_section]
    )

    # "Run All" fans out to all four tabs at once; the individual buttons
    # update only their own tab.
    test_all_btn.click(
        run_all_tests,
        inputs=[url_input],
        outputs=[perf_output, seo_output, sec_output, a11y_output]
    )

    perf_btn.click(run_performance_test, inputs=[url_input], outputs=[perf_output])
    seo_btn.click(run_seo_audit, inputs=[url_input], outputs=[seo_output])
    sec_btn.click(run_security_test, inputs=[url_input], outputs=[sec_output])
    a11y_btn.click(run_accessibility_test, inputs=[url_input], outputs=[a11y_output])
|
|
|
|
|
if __name__ == "__main__":
    # Start the Gradio server with default settings.
    demo.launch()