Update main.py
Browse files
main.py
CHANGED
|
@@ -1,14 +1,6 @@
|
|
| 1 |
-
|
| 2 |
-
"""
|
| 3 |
-
Luminet - Network Analysis Tool
|
| 4 |
-
===============================
|
| 5 |
-
|
| 6 |
-
A comprehensive network analysis tool that provides IP and domain reconnaissance
|
| 7 |
-
using only public, free data sources.
|
| 8 |
-
"""
|
| 9 |
-
|
| 10 |
-
import os, re, time, json, socket, logging, threading, hashlib, ipaddress, subprocess, concurrent.futures, requests, pycountry, ssl
|
| 11 |
from datetime import datetime, timedelta, timezone
|
|
|
|
| 12 |
from collections import defaultdict, Counter
|
| 13 |
from flask import Flask, render_template, request, jsonify
|
| 14 |
from OpenSSL import crypto
|
|
@@ -19,7 +11,7 @@ from dns.resolver import NoAnswer, NXDOMAIN, Timeout, NoNameservers, YXDOMAIN
|
|
| 19 |
from dns.exception import DNSException
|
| 20 |
from dns.query import BadResponse
|
| 21 |
|
| 22 |
-
#
|
| 23 |
try:
|
| 24 |
import scapy.all as scapy
|
| 25 |
SCAPY_AVAILABLE = True
|
|
@@ -29,20 +21,20 @@ except (ImportError, PermissionError):
|
|
| 29 |
try:
|
| 30 |
import nmap
|
| 31 |
NMAP_AVAILABLE = True
|
| 32 |
-
except
|
| 33 |
NMAP_AVAILABLE = False
|
| 34 |
|
| 35 |
-
# Flask
|
| 36 |
app = Flask(__name__)
|
| 37 |
-
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY',
|
| 38 |
app.jinja_env.globals.update(json=json)
|
| 39 |
app.jinja_env.filters['tojson'] = json.dumps
|
| 40 |
|
| 41 |
-
#
|
| 42 |
logging.basicConfig(level=logging.INFO)
|
| 43 |
logger = logging.getLogger(__name__)
|
| 44 |
|
| 45 |
-
# API
|
| 46 |
IANA_IP_BOOTSTRAP_URL = "https://rdap.iana.org/ip/"
|
| 47 |
IANA_ASN_BOOTSTRAP_URL = "https://rdap.iana.org/autnum/"
|
| 48 |
RIPESTAT_DATA_URL = "https://stat.ripe.net/data/"
|
|
@@ -50,10 +42,10 @@ IPINFO_URL = "https://ipinfo.io/"
|
|
| 50 |
IP_API_URL = "http://ip-api.com/json/"
|
| 51 |
OSM_NOMINATIM_URL = "https://nominatim.openstreetmap.org/search"
|
| 52 |
|
| 53 |
-
#
|
| 54 |
REQUEST_DELAY = 0.1
|
| 55 |
|
| 56 |
-
# DNSBL
|
| 57 |
DNS_BL_LISTS = {
|
| 58 |
'spam': {
|
| 59 |
'zen.spamhaus.org': 'Spamhaus Zen (SBL+XBL+PBL)',
|
|
@@ -66,12 +58,14 @@ DNS_BL_LISTS = {
|
|
| 66 |
'rbl.0spam.org': '0Spam Realtime BL',
|
| 67 |
'bl.nordspam.com': 'NordSpam IP BL',
|
| 68 |
},
|
|
|
|
| 69 |
'domain': {
|
| 70 |
'dbl.0spam.org': '0Spam Domain BL',
|
| 71 |
'dbl.nordspam.com': 'NordSpam Domain BL',
|
| 72 |
'uribl.spameatingmonkey.net': 'SEM URI BL',
|
| 73 |
'urired.spameatingmonkey.net': 'SEM URI Red',
|
| 74 |
},
|
|
|
|
| 75 |
'network': {
|
| 76 |
'b.barracudacentral.org': 'Barracuda Reputation Block List',
|
| 77 |
'dnsbl.sorbs.net': 'SORBS Aggregate',
|
|
@@ -84,6 +78,7 @@ DNS_BL_LISTS = {
|
|
| 84 |
'bl.ipv6.spameatingmonkey.net': 'SEM IPv6 BL',
|
| 85 |
'nbl.0spam.org': '0Spam Network BL',
|
| 86 |
},
|
|
|
|
| 87 |
'proxy_bot': {
|
| 88 |
'dnsbl.dronebl.org': 'DroneBL',
|
| 89 |
'tor.dan.me.uk': 'Tor Exit Nodes',
|
|
@@ -94,6 +89,7 @@ DNS_BL_LISTS = {
|
|
| 94 |
'socks.dnsbl.sorbs.net': 'SORBS SOCKS Proxies',
|
| 95 |
'zombie.dnsbl.sorbs.net': 'SORBS Zombies',
|
| 96 |
},
|
|
|
|
| 97 |
'misc': {
|
| 98 |
'psbl.surriel.com': 'Passive Spam Block List',
|
| 99 |
'db.wpbl.info': 'Weighted Private Block List',
|
|
@@ -106,7 +102,6 @@ DNS_BL_LISTS = {
|
|
| 106 |
}
|
| 107 |
|
| 108 |
def get_dns_records(domain):
|
| 109 |
-
"""Get comprehensive DNS records for a domain."""
|
| 110 |
records = {
|
| 111 |
"A": [], "AAAA": [], "MX": [], "TXT": [], "NS": [], "CNAME": None, "SOA": None,
|
| 112 |
"SRV": [], "CAA": [], "DMARC": [], "SPF": [], "PTR": [], "NAPTR": [], "SSHFP": [],
|
|
@@ -117,15 +112,15 @@ def get_dns_records(domain):
|
|
| 117 |
resolver.timeout = 5
|
| 118 |
resolver.lifetime = 5
|
| 119 |
|
| 120 |
-
# A
|
| 121 |
for rdata in safe_resolve(resolver, domain, 'A'):
|
| 122 |
records["A"].append(rdata.address)
|
| 123 |
|
| 124 |
-
# AAAA
|
| 125 |
for rdata in safe_resolve(resolver, domain, 'AAAA'):
|
| 126 |
records["AAAA"].append(rdata.address)
|
| 127 |
|
| 128 |
-
# PTR
|
| 129 |
for ip_addr in records["A"] + records["AAAA"]:
|
| 130 |
try:
|
| 131 |
rev_name = dns.reversename.from_address(ip_addr)
|
|
@@ -134,11 +129,11 @@ def get_dns_records(domain):
|
|
| 134 |
except Exception:
|
| 135 |
pass
|
| 136 |
|
| 137 |
-
# MX
|
| 138 |
for rdata in safe_resolve(resolver, domain, 'MX'):
|
| 139 |
records["MX"].append({"preference": rdata.preference, "exchange": rdata.exchange.to_text()})
|
| 140 |
|
| 141 |
-
# TXT
|
| 142 |
for rdata in safe_resolve(resolver, domain, 'TXT'):
|
| 143 |
for txt_string in rdata.strings:
|
| 144 |
txt_record = txt_string.decode('utf-8')
|
|
@@ -146,21 +141,21 @@ def get_dns_records(domain):
|
|
| 146 |
if txt_record.lower().startswith('v=spf'):
|
| 147 |
records["SPF"].append(txt_record)
|
| 148 |
|
| 149 |
-
# DMARC
|
| 150 |
for rdata in safe_resolve(resolver, f'_dmarc.{domain}', 'TXT'):
|
| 151 |
for txt_string in rdata.strings:
|
| 152 |
records["DMARC"].append(txt_string.decode('utf-8'))
|
| 153 |
|
| 154 |
-
# NS
|
| 155 |
for rdata in safe_resolve(resolver, domain, 'NS'):
|
| 156 |
records["NS"].append(rdata.target.to_text())
|
| 157 |
|
| 158 |
-
# CNAME
|
| 159 |
cname_result = safe_resolve(resolver, domain, 'CNAME')
|
| 160 |
if cname_result:
|
| 161 |
records["CNAME"] = cname_result[0].target.to_text()
|
| 162 |
|
| 163 |
-
# SOA
|
| 164 |
soa_result = safe_resolve(resolver, domain, 'SOA')
|
| 165 |
if soa_result:
|
| 166 |
soa = soa_result[0]
|
|
@@ -174,26 +169,43 @@ def get_dns_records(domain):
|
|
| 174 |
"minimum": soa.minimum
|
| 175 |
}
|
| 176 |
|
| 177 |
-
#
|
| 178 |
-
for
|
| 179 |
-
|
| 180 |
-
|
| 181 |
-
|
| 182 |
-
|
| 183 |
-
|
| 184 |
-
|
| 185 |
-
|
| 186 |
-
|
| 187 |
-
|
| 188 |
-
|
| 189 |
-
|
| 190 |
-
|
| 191 |
-
|
| 192 |
-
|
| 193 |
-
|
| 194 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 195 |
|
| 196 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 197 |
try:
|
| 198 |
extracted = tldextract.extract(domain)
|
| 199 |
real_domain = f"{extracted.domain}.{extracted.suffix}"
|
|
@@ -208,17 +220,45 @@ def get_dns_records(domain):
|
|
| 208 |
except Exception as e:
|
| 209 |
records["WHOIS"] = {"error": str(e)}
|
| 210 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 211 |
return records
|
| 212 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 213 |
def safe_resolve(resolver, qname, rdtype):
|
| 214 |
-
"""Safely resolve DNS queries with error handling."""
|
| 215 |
try:
|
| 216 |
return resolver.resolve(qname, rdtype, raise_on_no_answer=False)
|
| 217 |
except (NoAnswer, NXDOMAIN, Timeout, NoNameservers, DNSException):
|
| 218 |
return []
|
| 219 |
|
| 220 |
def get_authoritative_dns(domain):
|
| 221 |
-
"""
|
| 222 |
try:
|
| 223 |
ns_to_query = '198.41.0.4' # Root server
|
| 224 |
parts = domain.split('.')
|
|
@@ -239,25 +279,23 @@ def get_authoritative_dns(domain):
|
|
| 239 |
return None
|
| 240 |
|
| 241 |
def check_dnssec(domain):
|
| 242 |
-
"""Check DNSSEC status for a domain."""
|
| 243 |
try:
|
| 244 |
resolver = dns.resolver.Resolver()
|
| 245 |
resolver.use_edns(edns=True, payload=4096)
|
| 246 |
ds_records = resolver.resolve(domain, 'DS', raise_on_no_answer=False)
|
| 247 |
dnskey_records = resolver.resolve(domain, 'DNSKEY', raise_on_no_answer=False)
|
| 248 |
if ds_records and dnskey_records:
|
| 249 |
-
return "DNSSEC
|
| 250 |
elif ds_records:
|
| 251 |
-
return "DNSSEC
|
| 252 |
else:
|
| 253 |
-
return "DNSSEC
|
| 254 |
except (NoAnswer, NXDOMAIN, Timeout, NoNameservers):
|
| 255 |
-
return "DNSSEC
|
| 256 |
except Exception as e:
|
| 257 |
-
return f"
|
| 258 |
|
| 259 |
def check_dnsbl(ip_address):
|
| 260 |
-
"""Check IP address against DNS blacklists."""
|
| 261 |
results = defaultdict(list)
|
| 262 |
results_lock = threading.Lock()
|
| 263 |
|
|
@@ -267,7 +305,7 @@ def check_dnsbl(ip_address):
|
|
| 267 |
else:
|
| 268 |
reversed_ip = dns.reversename.from_address(ip_address).to_text(omit_final_dot=True).replace('.ip6.arpa', '')
|
| 269 |
except Exception as e:
|
| 270 |
-
results['error'] = f"IP
|
| 271 |
return results
|
| 272 |
|
| 273 |
def query_worker(dnsbl, description, category):
|
|
@@ -309,32 +347,102 @@ def check_dnsbl(ip_address):
|
|
| 309 |
future.result()
|
| 310 |
except Exception as exc:
|
| 311 |
dnsbl_name = future_to_dnsbl[future]
|
| 312 |
-
logger.error(f'{dnsbl_name}
|
| 313 |
|
| 314 |
return results
|
| 315 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 316 |
def get_country_name(code: str) -> str:
|
| 317 |
-
"""Get country name from country code."""
|
| 318 |
try:
|
| 319 |
country = pycountry.countries.get(alpha_2=code.upper())
|
| 320 |
-
return country.name if country else "
|
| 321 |
except Exception as e:
|
| 322 |
-
logger.error(f"
|
| 323 |
-
return "
|
| 324 |
|
| 325 |
def get_reverse_dns(ip):
|
| 326 |
-
"""Get reverse DNS for an IP address."""
|
| 327 |
try:
|
| 328 |
hostname, _, _ = socket.gethostbyaddr(ip)
|
| 329 |
return hostname
|
| 330 |
except socket.herror:
|
| 331 |
-
return "PTR
|
| 332 |
except Exception as e:
|
| 333 |
-
logger.error(f"Reverse DNS
|
| 334 |
-
return f"
|
| 335 |
|
| 336 |
def get_ping_latency(ip, packet_count=4):
|
| 337 |
-
"""Get ping latency statistics."""
|
| 338 |
try:
|
| 339 |
command = ["ping", "-c", str(packet_count), ip] if os.name != "nt" else ["ping", "-n", str(packet_count), ip]
|
| 340 |
result = subprocess.run(command, capture_output=True, text=True, timeout=10, encoding='utf-8', errors='ignore')
|
|
@@ -343,7 +451,7 @@ def get_ping_latency(ip, packet_count=4):
|
|
| 343 |
output = result.stdout
|
| 344 |
|
| 345 |
if os.name != "nt":
|
| 346 |
-
match = re.search(r"rtt
|
| 347 |
if match:
|
| 348 |
return {
|
| 349 |
"min": f"{match.group(1)} ms",
|
|
@@ -360,20 +468,19 @@ def get_ping_latency(ip, packet_count=4):
|
|
| 360 |
"avg": match.group(3)
|
| 361 |
}
|
| 362 |
|
| 363 |
-
return {"error": "Ping
|
| 364 |
else:
|
| 365 |
-
return {"error": f"Ping
|
| 366 |
except FileNotFoundError:
|
| 367 |
-
return {"error": "Ping
|
| 368 |
except subprocess.TimeoutExpired:
|
| 369 |
-
return {"error": "Ping
|
| 370 |
except Exception as e:
|
| 371 |
-
return {"error": f"
|
| 372 |
|
| 373 |
def scan_ports(ip):
|
| 374 |
-
"""Scan common ports using nmap."""
|
| 375 |
if not NMAP_AVAILABLE:
|
| 376 |
-
return {"error": "Nmap
|
| 377 |
|
| 378 |
try:
|
| 379 |
nm = nmap.PortScanner()
|
|
@@ -401,21 +508,20 @@ def scan_ports(ip):
|
|
| 401 |
open_ports.append({
|
| 402 |
'port': port,
|
| 403 |
'state': port_info['state'],
|
| 404 |
-
'name': port_info.get('name', '
|
| 405 |
'product': port_info.get('product', ''),
|
| 406 |
'version': port_info.get('version', ''),
|
| 407 |
})
|
| 408 |
return {"ports": open_ports}
|
| 409 |
|
| 410 |
except nmap.PortScannerError as e:
|
| 411 |
-
logger.error(f"Nmap
|
| 412 |
-
return {"error": f"Nmap
|
| 413 |
except Exception as e:
|
| 414 |
-
logger.error(f"Port
|
| 415 |
-
return {"error": f"Port
|
| 416 |
|
| 417 |
def get_http_headers(host):
|
| 418 |
-
"""Get HTTP headers for a host."""
|
| 419 |
urls = [f"https://{host}", f"http://{host}"]
|
| 420 |
headers = {'User-Agent': 'Luminet/1.0 (Network Analysis Tool)'}
|
| 421 |
last_error = None
|
|
@@ -447,10 +553,10 @@ def get_http_headers(host):
|
|
| 447 |
}
|
| 448 |
except requests.exceptions.RequestException as e:
|
| 449 |
last_error = e
|
| 450 |
-
logger.warning(f"HTTP
|
| 451 |
continue
|
| 452 |
|
| 453 |
-
return {"error": f"HTTP/HTTPS
|
| 454 |
|
| 455 |
def is_public_ip(ip: str) -> bool:
|
| 456 |
"""Check if an IP address is public/routable."""
|
|
@@ -527,6 +633,7 @@ def get_locations_batch(ips: List[str]) -> List[Optional[Dict]]:
|
|
| 527 |
|
| 528 |
if response.status_code == 200:
|
| 529 |
results = response.json()
|
|
|
|
| 530 |
result_map = {result["query"]: result for result in results if result.get("status") == "success"}
|
| 531 |
locations = []
|
| 532 |
for ip in ips:
|
|
@@ -550,10 +657,12 @@ def get_locations_batch(ips: List[str]) -> List[Optional[Dict]]:
|
|
| 550 |
except Exception as e:
|
| 551 |
logger.error(f"Batch location lookup failed: {str(e)}")
|
| 552 |
|
|
|
|
| 553 |
return [get_location(ip) if is_public_ip(ip) else None for ip in ips]
|
| 554 |
|
| 555 |
def system_traceroute(ip: str, max_hops: int = 30, timeout: int = 60) -> List[Dict]:
|
| 556 |
"""Perform a system traceroute and return hops."""
|
|
|
|
| 557 |
if os.name == "nt":
|
| 558 |
cmd = ["tracert", "-d", "-h", str(max_hops), "-w", "1000", ip]
|
| 559 |
else:
|
|
@@ -572,19 +681,24 @@ def system_traceroute(ip: str, max_hops: int = 30, timeout: int = 60) -> List[Di
|
|
| 572 |
ttl = 1
|
| 573 |
|
| 574 |
for line in proc.stdout.splitlines():
|
|
|
|
| 575 |
if not line.strip() or line.startswith(("traceroute", "tracert", "Tracing")):
|
| 576 |
continue
|
| 577 |
|
|
|
|
| 578 |
if os.name == "nt":
|
|
|
|
| 579 |
parts = line.strip().split()
|
| 580 |
if len(parts) >= 3 and parts[1].replace(".", "").isdigit():
|
| 581 |
hop_ip = parts[1]
|
| 582 |
hops.append({"ip": hop_ip, "ttl": ttl})
|
| 583 |
ttl += 1
|
| 584 |
else:
|
|
|
|
| 585 |
parts = line.strip().split()
|
| 586 |
if len(parts) >= 2 and parts[1] != "*":
|
| 587 |
hop_ip = parts[1]
|
|
|
|
| 588 |
if "(" in hop_ip and ")" in hop_ip:
|
| 589 |
hop_ip = hop_ip[hop_ip.find("(")+1:hop_ip.find(")")]
|
| 590 |
hops.append({"ip": hop_ip, "ttl": ttl})
|
|
@@ -604,14 +718,19 @@ def system_traceroute(ip: str, max_hops: int = 30, timeout: int = 60) -> List[Di
|
|
| 604 |
|
| 605 |
def enriched_traceroute(target_ip: str, max_hops: int = 30) -> List[Dict]:
|
| 606 |
"""Perform traceroute with geolocation information for each hop."""
|
|
|
|
| 607 |
hops = system_traceroute(target_ip, max_hops=max_hops)
|
| 608 |
|
| 609 |
if not hops:
|
| 610 |
return []
|
| 611 |
|
|
|
|
| 612 |
hop_ips = [hop["ip"] for hop in hops]
|
|
|
|
|
|
|
| 613 |
locations = get_locations_batch(hop_ips)
|
| 614 |
|
|
|
|
| 615 |
enriched_hops = []
|
| 616 |
for hop, location in zip(hops, locations):
|
| 617 |
enriched_hop = {
|
|
@@ -624,7 +743,6 @@ def enriched_traceroute(target_ip: str, max_hops: int = 30) -> List[Dict]:
|
|
| 624 |
return enriched_hops
|
| 625 |
|
| 626 |
def get_asn_from_ip(ip):
|
| 627 |
-
"""Get ASN from IP address using RIPEstat."""
|
| 628 |
try:
|
| 629 |
r = requests.get(f"{RIPESTAT_DATA_URL}prefix-overview/data.json?resource={ip}", timeout=5)
|
| 630 |
if r.status_code == 200:
|
|
@@ -634,67 +752,76 @@ def get_asn_from_ip(ip):
|
|
| 634 |
return str(asns[0]["asn"])
|
| 635 |
return None
|
| 636 |
except Exception as e:
|
| 637 |
-
logger.error(f"ASN
|
| 638 |
return None
|
| 639 |
|
| 640 |
-
def
|
| 641 |
-
"""Get authoritative RDAP URL for a query."""
|
| 642 |
-
query = query.strip().upper().replace("AS", "")
|
| 643 |
-
is_ip = "." in query or ":" in query
|
| 644 |
-
rdap_type = "ip" if is_ip else "autnum"
|
| 645 |
-
bootstrap_url = IANA_IP_BOOTSTRAP_URL if is_ip else IANA_ASN_BOOTSTRAP_URL
|
| 646 |
-
|
| 647 |
try:
|
| 648 |
-
|
| 649 |
-
|
| 650 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 651 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 652 |
r.raise_for_status()
|
| 653 |
-
|
| 654 |
-
|
| 655 |
-
|
| 656 |
-
|
| 657 |
-
|
| 658 |
-
|
| 659 |
-
|
| 660 |
-
logger.warning(f"IANA RDAP not implemented, trying fallback RIRs")
|
| 661 |
-
else:
|
| 662 |
-
logger.warning(f"IANA RDAP URL lookup failed: {str(http_err)}")
|
| 663 |
-
except requests.exceptions.RequestException as e:
|
| 664 |
-
logger.warning(f"IANA RDAP connection failed: {str(e)}")
|
| 665 |
-
|
| 666 |
-
fallback_rir_urls = {
|
| 667 |
-
"ripe": f"https://rdap.db.ripe.net/{rdap_type}/{query}",
|
| 668 |
-
"apnic": f"https://rdap.apnic.net/{rdap_type}/{query}",
|
| 669 |
-
"lacnic": f"https://rdap.lacnic.net/rdap/{rdap_type}/{query}",
|
| 670 |
-
"afrinic": f"https://rdap.afrinic.net/rdap/{rdap_type}/{query}",
|
| 671 |
-
"arin": f"https://rdap.arin.net/registry/{rdap_type}/{query}"
|
| 672 |
-
}
|
| 673 |
-
|
| 674 |
-
for rir, url in fallback_rir_urls.items():
|
| 675 |
-
try:
|
| 676 |
-
r = robust_get_request(url, timeout=10, headers={"Accept": "application/rdap+json"})
|
| 677 |
-
if r.status_code == 200:
|
| 678 |
-
logger.info(f"{rir.upper()} RDAP source used: {url}")
|
| 679 |
-
return url
|
| 680 |
-
except requests.exceptions.RequestException:
|
| 681 |
-
continue
|
| 682 |
|
| 683 |
-
|
| 684 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 685 |
|
| 686 |
def parse_vcard(vcard_array):
|
| 687 |
-
"""Parse vCard array from RDAP response."""
|
| 688 |
if not isinstance(vcard_array, list) or len(vcard_array) < 2:
|
| 689 |
return {}
|
| 690 |
|
| 691 |
vcard_data = vcard_array[1]
|
| 692 |
contact = {
|
| 693 |
-
"name": "
|
| 694 |
-
"org": "
|
| 695 |
-
"email": "
|
| 696 |
-
"address": "
|
| 697 |
-
"tel": "
|
| 698 |
}
|
| 699 |
|
| 700 |
for item in vcard_data:
|
|
@@ -719,8 +846,209 @@ def parse_vcard(vcard_array):
|
|
| 719 |
|
| 720 |
return contact
|
| 721 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 722 |
def parse_rdap_response(data):
|
| 723 |
-
"""
|
|
|
|
|
|
|
|
|
|
| 724 |
parsed_info = {
|
| 725 |
"summary": {},
|
| 726 |
"contacts": {
|
|
@@ -731,32 +1059,32 @@ def parse_rdap_response(data):
|
|
| 731 |
"other": []
|
| 732 |
},
|
| 733 |
"details": {},
|
| 734 |
-
"source_rir": "
|
| 735 |
}
|
| 736 |
|
| 737 |
-
#
|
| 738 |
if "port43" in data:
|
| 739 |
port43_val = data["port43"].lower()
|
| 740 |
rir_mapping = {
|
| 741 |
"apnic": "APNIC", "lacnic": "LACNIC", "afrinic": "AfriNIC",
|
| 742 |
"arin": "ARIN", "ripe": "RIPE NCC",
|
| 743 |
}
|
| 744 |
-
parsed_info["source_rir"] = next((v for k, v in rir_mapping.items() if k in port43_val), "
|
| 745 |
|
| 746 |
-
#
|
| 747 |
summary = {
|
| 748 |
-
"name": data.get("name", "
|
| 749 |
-
"handle": data.get("handle", "
|
| 750 |
-
"country": data.get("country", "
|
| 751 |
"ip_range": f"{data.get('startAddress', '')} - {data.get('endAddress', '')}",
|
| 752 |
"asn_range": f"{data.get('startAutnum', '')} - {data.get('endAutnum', '')}",
|
| 753 |
-
"type": data.get("type", "
|
| 754 |
}
|
| 755 |
if summary["ip_range"] == " - ": summary.pop("ip_range", None)
|
| 756 |
if summary["asn_range"] == " - ": summary.pop("asn_range", None)
|
| 757 |
parsed_info["summary"] = summary
|
| 758 |
|
| 759 |
-
#
|
| 760 |
for entity in data.get("entities", []):
|
| 761 |
vcard_array = entity.get("vcardArray")
|
| 762 |
if not vcard_array:
|
|
@@ -764,6 +1092,7 @@ def parse_rdap_response(data):
|
|
| 764 |
contact_details = parse_vcard(vcard_array)
|
| 765 |
roles = entity.get("roles", [])
|
| 766 |
|
|
|
|
| 767 |
assigned = False
|
| 768 |
for role in ["registrant", "administrative", "technical", "abuse"]:
|
| 769 |
if role in roles:
|
|
@@ -772,24 +1101,67 @@ def parse_rdap_response(data):
|
|
| 772 |
if not assigned:
|
| 773 |
parsed_info["contacts"]["other"].append(contact_details)
|
| 774 |
|
| 775 |
-
# Additional details
|
| 776 |
parsed_info["details"] = {
|
| 777 |
"events": data.get("events", []),
|
| 778 |
"remarks": data.get("remarks", []),
|
| 779 |
"links": data.get("links", []),
|
| 780 |
"notices": data.get("notices", []),
|
| 781 |
-
"object_class_name": data.get("objectClassName", "
|
| 782 |
}
|
| 783 |
|
| 784 |
return parsed_info
|
| 785 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 786 |
def get_additional_info_from_ripestat(query):
|
| 787 |
-
"""Get additional information from RIPEstat."""
|
| 788 |
additional_data = {}
|
| 789 |
endpoints = {
|
| 790 |
-
"announced-prefixes": "
|
| 791 |
-
"routing-status": "
|
| 792 |
-
"bgp-state": "BGP
|
| 793 |
}
|
| 794 |
|
| 795 |
for endpoint, title in endpoints.items():
|
|
@@ -800,16 +1172,15 @@ def get_additional_info_from_ripestat(query):
|
|
| 800 |
if r.status_code == 200:
|
| 801 |
additional_data[title] = r.json().get("data", {})
|
| 802 |
else:
|
| 803 |
-
additional_data[title] = {"error": f"HTTP {r.status_code}:
|
| 804 |
except requests.exceptions.RequestException as e:
|
| 805 |
-
additional_data[title] = {"error": f"
|
| 806 |
except Exception as e:
|
| 807 |
-
additional_data[title] = {"error": f"
|
| 808 |
|
| 809 |
return additional_data
|
| 810 |
|
| 811 |
def get_ipinfo_details(ip):
|
| 812 |
-
"""Get IP information from ipinfo.io."""
|
| 813 |
if not re.match(r"^\d{1,3}(?:\.\d{1,3}){3}$", ip) and ":" not in ip:
|
| 814 |
return None
|
| 815 |
|
|
@@ -822,11 +1193,10 @@ def get_ipinfo_details(ip):
|
|
| 822 |
return data
|
| 823 |
return None
|
| 824 |
except requests.exceptions.RequestException as e:
|
| 825 |
-
logger.warning(f"IPinfo
|
| 826 |
return None
|
| 827 |
|
| 828 |
def analyze_ssl(hostname, port=443):
|
| 829 |
-
"""Analyze SSL certificate for a hostname."""
|
| 830 |
try:
|
| 831 |
context = ssl.create_default_context()
|
| 832 |
context.check_hostname = False
|
|
@@ -875,17 +1245,16 @@ def analyze_ssl(hostname, port=443):
|
|
| 875 |
}
|
| 876 |
|
| 877 |
except ssl.SSLError as e:
|
| 878 |
-
logger.error(f"SSL
|
| 879 |
-
return {"error": f"SSL
|
| 880 |
except socket.timeout:
|
| 881 |
-
logger.error(f"
|
| 882 |
-
return {"error": "
|
| 883 |
except Exception as e:
|
| 884 |
-
logger.error(f"
|
| 885 |
-
return {"error": f"
|
| 886 |
|
| 887 |
def _get_alt_names(x509):
|
| 888 |
-
"""Get alternative names from SSL certificate."""
|
| 889 |
alt_names = []
|
| 890 |
for i in range(x509.get_extension_count()):
|
| 891 |
ext = x509.get_extension(i)
|
|
@@ -894,7 +1263,6 @@ def _get_alt_names(x509):
|
|
| 894 |
return alt_names
|
| 895 |
|
| 896 |
def generate_security_report(ipinfo_data, http_headers, port_scan, dnsbl_results, ip):
|
| 897 |
-
"""Generate comprehensive security report."""
|
| 898 |
report = {
|
| 899 |
"proxy_vpn_tor": False,
|
| 900 |
"anonymity_services": [],
|
|
@@ -910,7 +1278,7 @@ def generate_security_report(ipinfo_data, http_headers, port_scan, dnsbl_results
|
|
| 910 |
report["proxy_vpn_tor"] = True
|
| 911 |
detected_str = ", ".join(report["anonymity_services"])
|
| 912 |
report["security_issues"].append(
|
| 913 |
-
f"
|
| 914 |
)
|
| 915 |
|
| 916 |
if ipinfo_data:
|
|
@@ -931,7 +1299,7 @@ def generate_security_report(ipinfo_data, http_headers, port_scan, dnsbl_results
|
|
| 931 |
category_map = {
|
| 932 |
'spam': 'Spam',
|
| 933 |
'proxy_bot': 'Proxy/Bot',
|
| 934 |
-
'anonymity': '
|
| 935 |
}
|
| 936 |
|
| 937 |
service_name = category_map.get(category, category.capitalize())
|
|
@@ -945,7 +1313,7 @@ def generate_security_report(ipinfo_data, http_headers, port_scan, dnsbl_results
|
|
| 945 |
open_critical = [p for p in report["open_ports"] if p["port"] in critical_ports]
|
| 946 |
|
| 947 |
if open_critical:
|
| 948 |
-
report["security_issues"].append(f"{len(open_critical)}
|
| 949 |
|
| 950 |
if http_headers and not http_headers.get("error") and "headers" in http_headers:
|
| 951 |
headers = {k.lower(): v for k, v in http_headers["headers"].items()}
|
|
@@ -968,12 +1336,11 @@ def generate_security_report(ipinfo_data, http_headers, port_scan, dnsbl_results
|
|
| 968 |
if real_name not in headers
|
| 969 |
]
|
| 970 |
if missing_headers:
|
| 971 |
-
report["security_issues"].append(f"{len(missing_headers)}
|
| 972 |
|
| 973 |
return report
|
| 974 |
|
| 975 |
def check_anonymity_services(ip):
|
| 976 |
-
"""Check for anonymity services like Tor, VPN, proxies."""
|
| 977 |
anonymity_services = []
|
| 978 |
proxy_lists = [
|
| 979 |
'proxy.dnsbl.sorbs.net',
|
|
@@ -1011,7 +1378,7 @@ def check_anonymity_services(ip):
|
|
| 1011 |
'tornevall': 'VPN',
|
| 1012 |
'efnetrbl': 'IRC Proxy',
|
| 1013 |
'spamrats': 'Spam Bot',
|
| 1014 |
-
'blocklist': 'Hack
|
| 1015 |
'dronebl': 'Zombie/Botnet'
|
| 1016 |
}
|
| 1017 |
|
|
@@ -1030,26 +1397,24 @@ def check_anonymity_services(ip):
|
|
| 1030 |
return anonymity_services
|
| 1031 |
|
| 1032 |
except Exception as e:
|
| 1033 |
-
logger.error(f"
|
| 1034 |
return []
|
| 1035 |
|
| 1036 |
def get_ip_classification(ip):
|
| 1037 |
-
"""Classify IP address type."""
|
| 1038 |
try:
|
| 1039 |
ip_obj = ipaddress.ip_address(ip)
|
| 1040 |
if ip_obj.is_private:
|
| 1041 |
-
return "Private", "
|
| 1042 |
elif ip_obj.is_multicast:
|
| 1043 |
-
return "Multicast", "
|
| 1044 |
elif ip_obj.is_global:
|
| 1045 |
-
return "Public", "
|
| 1046 |
else:
|
| 1047 |
-
return "Special", "
|
| 1048 |
except ValueError:
|
| 1049 |
-
return "Invalid", "
|
| 1050 |
|
| 1051 |
def robust_get_request(url, retries=3, backoff_factor=0.5, **kwargs):
|
| 1052 |
-
"""Make robust HTTP request with retries."""
|
| 1053 |
headers = kwargs.get('headers', {})
|
| 1054 |
headers.setdefault('User-Agent', 'Luminet/1.0 (Network Analysis Tool)')
|
| 1055 |
kwargs['headers'] = headers
|
|
@@ -1066,22 +1431,23 @@ def robust_get_request(url, retries=3, backoff_factor=0.5, **kwargs):
|
|
| 1066 |
time.sleep(wait_time)
|
| 1067 |
return None
|
| 1068 |
|
| 1069 |
-
#
|
| 1070 |
@app.route("/", methods=["GET", "POST"])
|
| 1071 |
def index():
|
| 1072 |
-
|
| 1073 |
query = request.form.get("query", "").strip() if request.method == "POST" else ""
|
| 1074 |
main_data, additional_data, ipinfo_data, raw_json_str, error = None, None, None, None, None
|
| 1075 |
-
traceroute_locations, origin_asn = None, None
|
| 1076 |
reverse_dns, ping_data, port_scan, http_headers, ssl_info = None, None, None, None, None
|
| 1077 |
-
dnsbl_results, security_report = None, None
|
|
|
|
| 1078 |
|
| 1079 |
-
# DNS
|
| 1080 |
dns_records, authoritative_dns, dnssec_status = None, None, None
|
| 1081 |
|
| 1082 |
if request.method == "POST" and query:
|
| 1083 |
try:
|
| 1084 |
-
#
|
| 1085 |
ip_to_query = query
|
| 1086 |
is_domain = False
|
| 1087 |
try:
|
|
@@ -1089,11 +1455,11 @@ def index():
|
|
| 1089 |
ip_to_query = addr_info[0][4][0]
|
| 1090 |
is_domain = True
|
| 1091 |
except socket.gaierror:
|
| 1092 |
-
pass
|
| 1093 |
except Exception as e:
|
| 1094 |
-
logger.error(f"DNS
|
| 1095 |
|
| 1096 |
-
# RDAP
|
| 1097 |
authoritative_url = get_authoritative_rdap_url(ip_to_query)
|
| 1098 |
if authoritative_url:
|
| 1099 |
try:
|
|
@@ -1106,78 +1472,92 @@ def index():
|
|
| 1106 |
main_data["summary"]["ip_class_desc"] = ip_class_desc
|
| 1107 |
raw_json_str = json.dumps(raw_data, indent=2, ensure_ascii=False)
|
| 1108 |
except requests.exceptions.HTTPError as http_err:
|
| 1109 |
-
error = f"API
|
| 1110 |
except Exception as e:
|
| 1111 |
-
error = f"RDAP
|
| 1112 |
else:
|
| 1113 |
-
error = f"
|
| 1114 |
|
| 1115 |
if error:
|
| 1116 |
logger.error(error)
|
| 1117 |
|
| 1118 |
-
#
|
| 1119 |
if not error:
|
| 1120 |
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
|
|
|
|
| 1121 |
tasks = {
|
| 1122 |
'ipinfo': executor.submit(get_ipinfo_details, ip_to_query),
|
| 1123 |
'ripestat': executor.submit(get_additional_info_from_ripestat, ip_to_query),
|
| 1124 |
'reverse_dns': executor.submit(get_reverse_dns, ip_to_query),
|
| 1125 |
'ping': executor.submit(get_ping_latency, ip_to_query),
|
| 1126 |
'dnsbl': executor.submit(check_dnsbl, ip_to_query),
|
|
|
|
|
|
|
| 1127 |
'traceroute': executor.submit(enriched_traceroute, ip_to_query)
|
| 1128 |
}
|
|
|
|
| 1129 |
results = {name: future.result() for name, future in tasks.items()}
|
| 1130 |
|
| 1131 |
ipinfo_data = results.get('ipinfo')
|
| 1132 |
additional_data = results.get('ripestat')
|
| 1133 |
reverse_dns = results.get('reverse_dns')
|
| 1134 |
ping_data = results.get('ping')
|
|
|
|
| 1135 |
dnsbl_results = results.get('dnsbl')
|
|
|
|
| 1136 |
traceroute_locations = results.get('traceroute')
|
| 1137 |
|
| 1138 |
-
# DNS
|
| 1139 |
if is_domain:
|
| 1140 |
try:
|
| 1141 |
dns_records = get_dns_records(query)
|
| 1142 |
authoritative_dns = get_authoritative_dns(query)
|
| 1143 |
dnssec_status = check_dnssec(query)
|
| 1144 |
except Exception as e:
|
| 1145 |
-
logger.error(f"DNS
|
| 1146 |
-
|
|
|
|
| 1147 |
|
| 1148 |
-
# ASN
|
| 1149 |
-
if ipinfo_data and ipinfo_data.get('
|
| 1150 |
-
origin_asn = ipinfo_data['
|
| 1151 |
else:
|
| 1152 |
origin_asn = get_asn_from_ip(ip_to_query)
|
|
|
|
|
|
|
|
|
|
| 1153 |
|
| 1154 |
-
#
|
| 1155 |
target_host = query if is_domain else ip_to_query
|
| 1156 |
http_headers = get_http_headers(target_host)
|
| 1157 |
if http_headers and not http_headers.get("error"):
|
| 1158 |
ssl_info = analyze_ssl(target_host)
|
| 1159 |
|
| 1160 |
-
# Port scanning
|
| 1161 |
if NMAP_AVAILABLE:
|
| 1162 |
port_scan = scan_ports(ip_to_query)
|
| 1163 |
else:
|
| 1164 |
-
port_scan = {"error": "Nmap
|
|
|
|
|
|
|
| 1165 |
|
| 1166 |
-
# Security report
|
| 1167 |
security_report = generate_security_report(
|
| 1168 |
ipinfo_data, http_headers, port_scan, dnsbl_results, ip_to_query
|
| 1169 |
)
|
| 1170 |
|
|
|
|
|
|
|
|
|
|
| 1171 |
except requests.exceptions.ConnectionError as conn_err:
|
| 1172 |
-
error = f"
|
| 1173 |
logger.error(error)
|
| 1174 |
except Exception as e:
|
| 1175 |
-
error = f"
|
| 1176 |
-
logger.error(f"
|
| 1177 |
|
| 1178 |
elif request.method == "POST" and not query:
|
| 1179 |
-
error = "
|
| 1180 |
|
|
|
|
| 1181 |
return render_template(
|
| 1182 |
"index.html",
|
| 1183 |
query=query,
|
|
@@ -1192,44 +1572,19 @@ def index():
|
|
| 1192 |
port_scan=port_scan,
|
| 1193 |
http_headers=http_headers,
|
| 1194 |
ssl_info=ssl_info,
|
| 1195 |
-
dnsbl_results=dnsbl_results,
|
| 1196 |
-
security_report=security_report,
|
| 1197 |
traceroute_locations=traceroute_locations,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1198 |
dns_records=dns_records,
|
| 1199 |
authoritative_dns=authoritative_dns,
|
| 1200 |
-
dnssec_status=dnssec_status
|
| 1201 |
-
origin_asn=origin_asn
|
| 1202 |
)
|
| 1203 |
|
| 1204 |
-
# API endpoint for external access
|
| 1205 |
-
@app.route("/api/analyze", methods=["POST"])
|
| 1206 |
-
def api_analyze():
|
| 1207 |
-
"""API endpoint for programmatic access."""
|
| 1208 |
-
try:
|
| 1209 |
-
data = request.get_json()
|
| 1210 |
-
if not data or 'ip' not in data:
|
| 1211 |
-
return jsonify({'error': 'IP address is required'}), 400
|
| 1212 |
-
|
| 1213 |
-
ip = data['ip'].strip()
|
| 1214 |
-
if not ip:
|
| 1215 |
-
return jsonify({'error': 'IP address cannot be empty'}), 400
|
| 1216 |
-
|
| 1217 |
-
# Basic analysis
|
| 1218 |
-
ipinfo_data = get_ipinfo_details(ip)
|
| 1219 |
-
ping_data = get_ping_latency(ip)
|
| 1220 |
-
reverse_dns = get_reverse_dns(ip)
|
| 1221 |
-
|
| 1222 |
-
return jsonify({
|
| 1223 |
-
'ip': ip,
|
| 1224 |
-
'ipinfo': ipinfo_data,
|
| 1225 |
-
'ping': ping_data,
|
| 1226 |
-
'reverse_dns': reverse_dns,
|
| 1227 |
-
})
|
| 1228 |
-
|
| 1229 |
-
except Exception as e:
|
| 1230 |
-
logger.error(f"API error: {str(e)}")
|
| 1231 |
-
return jsonify({'error': 'Internal server error'}), 500
|
| 1232 |
-
|
| 1233 |
if __name__ == "__main__":
|
| 1234 |
port = int(os.environ.get("PORT", 7860))
|
| 1235 |
app.run(host="0.0.0.0", port=port, debug=False)
|
|
|
|
| 1 |
+
import os, re, time, json, socket, logging, threading, hashlib, secrets, ipaddress, subprocess, concurrent.futures, requests, pycountry, ssl
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
from datetime import datetime, timedelta, timezone
|
| 3 |
+
from functools import wraps
|
| 4 |
from collections import defaultdict, Counter
|
| 5 |
from flask import Flask, render_template, request, jsonify
|
| 6 |
from OpenSSL import crypto
|
|
|
|
| 11 |
from dns.exception import DNSException
|
| 12 |
from dns.query import BadResponse
|
| 13 |
|
| 14 |
+
# Opsiyonel bağımlılıklar için import denemeleri
|
| 15 |
try:
|
| 16 |
import scapy.all as scapy
|
| 17 |
SCAPY_AVAILABLE = True
|
|
|
|
| 21 |
try:
|
| 22 |
import nmap
|
| 23 |
NMAP_AVAILABLE = True
|
| 24 |
+
except ImportError:
|
| 25 |
NMAP_AVAILABLE = False
|
| 26 |
|
| 27 |
+
# Flask uygulaması ve konfigürasyon
|
| 28 |
app = Flask(__name__)
|
| 29 |
+
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', secrets.token_hex(32))
|
| 30 |
app.jinja_env.globals.update(json=json)
|
| 31 |
app.jinja_env.filters['tojson'] = json.dumps
|
| 32 |
|
| 33 |
+
# Loglama konfigürasyonu
|
| 34 |
logging.basicConfig(level=logging.INFO)
|
| 35 |
logger = logging.getLogger(__name__)
|
| 36 |
|
| 37 |
+
# API endpointleri
|
| 38 |
IANA_IP_BOOTSTRAP_URL = "https://rdap.iana.org/ip/"
|
| 39 |
IANA_ASN_BOOTSTRAP_URL = "https://rdap.iana.org/autnum/"
|
| 40 |
RIPESTAT_DATA_URL = "https://stat.ripe.net/data/"
|
|
|
|
| 42 |
IP_API_URL = "http://ip-api.com/json/"
|
| 43 |
OSM_NOMINATIM_URL = "https://nominatim.openstreetmap.org/search"
|
| 44 |
|
| 45 |
+
# Traceroute için API rate limitini göz önünde bulundur
|
| 46 |
REQUEST_DELAY = 0.1
|
| 47 |
|
| 48 |
+
# DNSBL listeleri
|
| 49 |
DNS_BL_LISTS = {
|
| 50 |
'spam': {
|
| 51 |
'zen.spamhaus.org': 'Spamhaus Zen (SBL+XBL+PBL)',
|
|
|
|
| 58 |
'rbl.0spam.org': '0Spam Realtime BL',
|
| 59 |
'bl.nordspam.com': 'NordSpam IP BL',
|
| 60 |
},
|
| 61 |
+
|
| 62 |
'domain': {
|
| 63 |
'dbl.0spam.org': '0Spam Domain BL',
|
| 64 |
'dbl.nordspam.com': 'NordSpam Domain BL',
|
| 65 |
'uribl.spameatingmonkey.net': 'SEM URI BL',
|
| 66 |
'urired.spameatingmonkey.net': 'SEM URI Red',
|
| 67 |
},
|
| 68 |
+
|
| 69 |
'network': {
|
| 70 |
'b.barracudacentral.org': 'Barracuda Reputation Block List',
|
| 71 |
'dnsbl.sorbs.net': 'SORBS Aggregate',
|
|
|
|
| 78 |
'bl.ipv6.spameatingmonkey.net': 'SEM IPv6 BL',
|
| 79 |
'nbl.0spam.org': '0Spam Network BL',
|
| 80 |
},
|
| 81 |
+
|
| 82 |
'proxy_bot': {
|
| 83 |
'dnsbl.dronebl.org': 'DroneBL',
|
| 84 |
'tor.dan.me.uk': 'Tor Exit Nodes',
|
|
|
|
| 89 |
'socks.dnsbl.sorbs.net': 'SORBS SOCKS Proxies',
|
| 90 |
'zombie.dnsbl.sorbs.net': 'SORBS Zombies',
|
| 91 |
},
|
| 92 |
+
|
| 93 |
'misc': {
|
| 94 |
'psbl.surriel.com': 'Passive Spam Block List',
|
| 95 |
'db.wpbl.info': 'Weighted Private Block List',
|
|
|
|
| 102 |
}
|
| 103 |
|
| 104 |
def get_dns_records(domain):
|
|
|
|
| 105 |
records = {
|
| 106 |
"A": [], "AAAA": [], "MX": [], "TXT": [], "NS": [], "CNAME": None, "SOA": None,
|
| 107 |
"SRV": [], "CAA": [], "DMARC": [], "SPF": [], "PTR": [], "NAPTR": [], "SSHFP": [],
|
|
|
|
| 112 |
resolver.timeout = 5
|
| 113 |
resolver.lifetime = 5
|
| 114 |
|
| 115 |
+
# A
|
| 116 |
for rdata in safe_resolve(resolver, domain, 'A'):
|
| 117 |
records["A"].append(rdata.address)
|
| 118 |
|
| 119 |
+
# AAAA
|
| 120 |
for rdata in safe_resolve(resolver, domain, 'AAAA'):
|
| 121 |
records["AAAA"].append(rdata.address)
|
| 122 |
|
| 123 |
+
# PTR
|
| 124 |
for ip_addr in records["A"] + records["AAAA"]:
|
| 125 |
try:
|
| 126 |
rev_name = dns.reversename.from_address(ip_addr)
|
|
|
|
| 129 |
except Exception:
|
| 130 |
pass
|
| 131 |
|
| 132 |
+
# MX
|
| 133 |
for rdata in safe_resolve(resolver, domain, 'MX'):
|
| 134 |
records["MX"].append({"preference": rdata.preference, "exchange": rdata.exchange.to_text()})
|
| 135 |
|
| 136 |
+
# TXT / SPF
|
| 137 |
for rdata in safe_resolve(resolver, domain, 'TXT'):
|
| 138 |
for txt_string in rdata.strings:
|
| 139 |
txt_record = txt_string.decode('utf-8')
|
|
|
|
| 141 |
if txt_record.lower().startswith('v=spf'):
|
| 142 |
records["SPF"].append(txt_record)
|
| 143 |
|
| 144 |
+
# DMARC
|
| 145 |
for rdata in safe_resolve(resolver, f'_dmarc.{domain}', 'TXT'):
|
| 146 |
for txt_string in rdata.strings:
|
| 147 |
records["DMARC"].append(txt_string.decode('utf-8'))
|
| 148 |
|
| 149 |
+
# NS
|
| 150 |
for rdata in safe_resolve(resolver, domain, 'NS'):
|
| 151 |
records["NS"].append(rdata.target.to_text())
|
| 152 |
|
| 153 |
+
# CNAME
|
| 154 |
cname_result = safe_resolve(resolver, domain, 'CNAME')
|
| 155 |
if cname_result:
|
| 156 |
records["CNAME"] = cname_result[0].target.to_text()
|
| 157 |
|
| 158 |
+
# SOA
|
| 159 |
soa_result = safe_resolve(resolver, domain, 'SOA')
|
| 160 |
if soa_result:
|
| 161 |
soa = soa_result[0]
|
|
|
|
| 169 |
"minimum": soa.minimum
|
| 170 |
}
|
| 171 |
|
| 172 |
+
# SRV
|
| 173 |
+
for rdata in safe_resolve(resolver, domain, 'SRV'):
|
| 174 |
+
records["SRV"].append({
|
| 175 |
+
"port": rdata.port,
|
| 176 |
+
"target": rdata.target.to_text(),
|
| 177 |
+
"priority": rdata.priority,
|
| 178 |
+
"weight": rdata.weight
|
| 179 |
+
})
|
| 180 |
+
|
| 181 |
+
# CAA
|
| 182 |
+
for rdata in safe_resolve(resolver, domain, 'CAA'):
|
| 183 |
+
records["CAA"].append({
|
| 184 |
+
"flags": rdata.flags,
|
| 185 |
+
"tag": rdata.tag.decode('utf-8'),
|
| 186 |
+
"value": rdata.value.decode('utf-8')
|
| 187 |
+
})
|
| 188 |
+
|
| 189 |
+
# Ek kayıtlar
|
| 190 |
+
for rdata in safe_resolve(resolver, domain, 'NAPTR'):
|
| 191 |
+
records["NAPTR"].append(str(rdata))
|
| 192 |
+
|
| 193 |
+
for rdata in safe_resolve(resolver, domain, 'SSHFP'):
|
| 194 |
+
records["SSHFP"].append(str(rdata))
|
| 195 |
+
|
| 196 |
+
for rdata in safe_resolve(resolver, domain, 'TLSA'):
|
| 197 |
+
records["TLSA"].append(str(rdata))
|
| 198 |
|
| 199 |
+
for rdata in safe_resolve(resolver, domain, 'LOC'):
|
| 200 |
+
records["LOC"].append(str(rdata))
|
| 201 |
+
|
| 202 |
+
for rdata in safe_resolve(resolver, domain, 'DS'):
|
| 203 |
+
records["DS"].append(str(rdata))
|
| 204 |
+
|
| 205 |
+
for rdata in safe_resolve(resolver, domain, 'DNSKEY'):
|
| 206 |
+
records["DNSKEY"].append(str(rdata))
|
| 207 |
+
|
| 208 |
+
# WHOIS - sadece gerçek domainlerde çalıştır
|
| 209 |
try:
|
| 210 |
extracted = tldextract.extract(domain)
|
| 211 |
real_domain = f"{extracted.domain}.{extracted.suffix}"
|
|
|
|
| 220 |
except Exception as e:
|
| 221 |
records["WHOIS"] = {"error": str(e)}
|
| 222 |
|
| 223 |
+
# Zone transfer testi
|
| 224 |
+
try:
|
| 225 |
+
ns_records = safe_resolve(resolver, domain, 'NS')
|
| 226 |
+
for ns in ns_records:
|
| 227 |
+
try:
|
| 228 |
+
ns_ip = safe_resolve(resolver, ns.target, 'A')[0].to_text()
|
| 229 |
+
xfr = dns.query.xfr(ns_ip, domain, timeout=5)
|
| 230 |
+
zone = dns.zone.from_xfr(xfr)
|
| 231 |
+
records["ZONE_TRANSFER"] = list(zone.nodes.keys())
|
| 232 |
+
except Exception:
|
| 233 |
+
pass
|
| 234 |
+
except Exception:
|
| 235 |
+
pass
|
| 236 |
+
|
| 237 |
return records
|
| 238 |
|
| 239 |
+
def whois_info(domain):
    """Look up WHOIS registration data for *domain*.

    The registrable domain (eTLD+1) is derived with tldextract before the
    WHOIS query so that subdomains resolve to their parent registration.

    Returns a dict with registrar, creation/expiration dates, name servers
    and contact emails, or {"error": ...} on any failure.
    """
    try:
        parts = tldextract.extract(domain)
        registrable = f"{parts.domain}.{parts.suffix}"
        record = whois.whois(registrable)
        details = {
            "registrar": record.registrar,
            "creation_date": str(record.creation_date),
            "expiration_date": str(record.expiration_date),
            "name_servers": record.name_servers,
            "emails": record.emails
        }
        return details
    except Exception as exc:
        return {"error": str(exc)}
|
| 253 |
+
|
| 254 |
def safe_resolve(resolver, qname, rdtype):
    """Resolve *qname*/*rdtype* through *resolver*, returning [] on DNS errors.

    Wraps resolver.resolve so callers can iterate the answer without
    handling NoAnswer/NXDOMAIN/Timeout/NoNameservers/DNSException themselves.
    """
    try:
        answer = resolver.resolve(qname, rdtype, raise_on_no_answer=False)
    except (NoAnswer, NXDOMAIN, Timeout, NoNameservers, DNSException):
        return []
    return answer
|
| 259 |
|
| 260 |
def get_authoritative_dns(domain):
|
| 261 |
+
"""Gerçek alan adının yetkili DNS sunucusunu bulur."""
|
| 262 |
try:
|
| 263 |
ns_to_query = '198.41.0.4' # Root server
|
| 264 |
parts = domain.split('.')
|
|
|
|
| 279 |
return None
|
| 280 |
|
| 281 |
def check_dnssec(domain):
    """Probe DS and DNSKEY records to classify the DNSSEC state of *domain*.

    Returns a human-readable (Turkish) status string; never raises.
    """
    try:
        resolver = dns.resolver.Resolver()
        # EDNS with a large payload is needed for DNSSEC-sized responses.
        resolver.use_edns(edns=True, payload=4096)
        ds_records = resolver.resolve(domain, 'DS', raise_on_no_answer=False)
        dnskey_records = resolver.resolve(domain, 'DNSKEY', raise_on_no_answer=False)
    except (NoAnswer, NXDOMAIN, Timeout, NoNameservers):
        return "DNSSEC Durumu Belirlenemedi"
    except Exception as e:
        return f"Hata: {str(e)}"
    if ds_records and dnskey_records:
        return "DNSSEC Etkin (DS ve DNSKEY kayıtları bulundu)"
    if ds_records:
        return "DNSSEC Kısmen Etkin (Sadece DS kaydı bulundu)"
    return "DNSSEC Etkin Değil"
|
| 297 |
|
| 298 |
def check_dnsbl(ip_address):
|
|
|
|
| 299 |
results = defaultdict(list)
|
| 300 |
results_lock = threading.Lock()
|
| 301 |
|
|
|
|
| 305 |
else:
|
| 306 |
reversed_ip = dns.reversename.from_address(ip_address).to_text(omit_final_dot=True).replace('.ip6.arpa', '')
|
| 307 |
except Exception as e:
|
| 308 |
+
results['error'] = f"IP adresi çevrim hatası: {str(e)}"
|
| 309 |
return results
|
| 310 |
|
| 311 |
def query_worker(dnsbl, description, category):
|
|
|
|
| 347 |
future.result()
|
| 348 |
except Exception as exc:
|
| 349 |
dnsbl_name = future_to_dnsbl[future]
|
| 350 |
+
logger.error(f'{dnsbl_name} sorgusunda beklenmedik hata oluştu: {exc}')
|
| 351 |
|
| 352 |
return results
|
| 353 |
|
| 354 |
+
def process_bgp_data_for_d3(bgp_data, target_asn_str=None):
    """Convert a RIPEstat BGP-state payload into a D3 force-graph structure.

    Builds one node per ASN seen on any AS path and one link per adjacent
    ASN pair per announcement. Nodes are tagged "target" (the queried ASN)
    or "source" (first hop of a path); parallel links between the same node
    pair carry total_links / link_index so the frontend can fan them out.

    Returns {"nodes": [...], "links": [...]} or None when there is nothing
    to draw.
    """
    if not bgp_data or not bgp_data.get('bgp_state'):
        return None

    def _path_color(as_path):
        # Deterministic color per distinct AS path (md5 of joined ASNs).
        digest = hashlib.md5('-'.join(map(str, as_path)).encode()).hexdigest()
        return f"#{digest[:6]}"

    def _asn_color(asn):
        # Deterministic per-ASN color from a different slice of the digest.
        digest = hashlib.md5(str(asn).encode()).hexdigest()
        return f"#{digest[6:12]}"

    target_asn = None
    if target_asn_str and target_asn_str.isdigit():
        target_asn = int(target_asn_str)

    node_map = {}
    flat_links = []
    first_hops = set()

    for announcement in bgp_data.get('bgp_state', []):
        as_path = announcement.get('path', [])
        if not as_path:
            continue

        first_hops.add(as_path[0])
        color = _path_color(as_path)

        shared = {
            "community": announcement.get("community", []),
            "full_path": announcement.get("path", []),
            "source_id": announcement.get("source_id", "Bilinmiyor"),
            "target_prefix": announcement.get("target_prefix", "Bilinmiyor")
        }

        for hop in as_path:
            node_map.setdefault(hop, {
                "id": str(hop),
                "label": f"AS{hop}",
                "color": _asn_color(hop),
                "node_type": "transit"
            })

        for left, right in zip(as_path, as_path[1:]):
            flat_links.append({
                "source": str(left),
                "target": str(right),
                "color": color,
                **shared
            })

    for hop, node in node_map.items():
        if hop == target_asn:
            node["type"] = "target"
        elif hop in first_hops:
            node["type"] = "source"
    if not flat_links:
        return None

    # Count parallel edges per unordered node pair, then number each link
    # within its group so the frontend can offset overlapping curves.
    pair_totals = Counter(tuple(sorted((l['source'], l['target']))) for l in flat_links)
    seen_in_group = defaultdict(int)
    final_links = []
    for l in flat_links:
        pair = tuple(sorted((l['source'], l['target'])))
        enriched = dict(l)
        enriched['total_links'] = pair_totals[pair]
        enriched['link_index'] = seen_in_group[pair]
        seen_in_group[pair] += 1
        final_links.append(enriched)

    return {"nodes": list(node_map.values()), "links": final_links}
|
| 426 |
+
|
| 427 |
def get_country_name(code: str) -> str:
    """Translate an ISO 3166-1 alpha-2 code into a country name.

    Falls back to "Bilinmeyen Ülke" for unknown codes and "Geçersiz Kod"
    when the lookup itself fails (e.g. non-string input).
    """
    try:
        match = pycountry.countries.get(alpha_2=code.upper())
    except Exception as exc:
        logger.error(f"Ülke ismi alınamadı: {str(exc)}")
        return "Geçersiz Kod"
    return match.name if match else "Bilinmeyen Ülke"
|
| 434 |
|
| 435 |
def get_reverse_dns(ip):
    """Reverse-resolve *ip* to a hostname via the system resolver (PTR).

    Returns the primary hostname, a Turkish "no PTR" message when the
    address has no reverse mapping, or an error string on other failures.
    """
    try:
        name, _aliases, _addresses = socket.gethostbyaddr(ip)
    except socket.herror:
        return "PTR kaydı bulunamadı"
    except Exception as e:
        logger.error(f"Reverse DNS hatası: {str(e)}")
        return f"Hata: {str(e)}"
    return name
|
| 444 |
|
| 445 |
def get_ping_latency(ip, packet_count=4):
|
|
|
|
| 446 |
try:
|
| 447 |
command = ["ping", "-c", str(packet_count), ip] if os.name != "nt" else ["ping", "-n", str(packet_count), ip]
|
| 448 |
result = subprocess.run(command, capture_output=True, text=True, timeout=10, encoding='utf-8', errors='ignore')
|
|
|
|
| 451 |
output = result.stdout
|
| 452 |
|
| 453 |
if os.name != "nt":
|
| 454 |
+
match = re.search(r"rtt en düşük/ortalama/en yüksek/mdev = ([\d.]+)/([\d.]+)/([\d.]+)/([\d.]+) ms", output)
|
| 455 |
if match:
|
| 456 |
return {
|
| 457 |
"min": f"{match.group(1)} ms",
|
|
|
|
| 468 |
"avg": match.group(3)
|
| 469 |
}
|
| 470 |
|
| 471 |
+
return {"error": "Ping sonuçları regex ile eşleşmedi", "raw": output}
|
| 472 |
else:
|
| 473 |
+
return {"error": f"Ping komutu hata kodu döndürdü ({result.returncode}): {result.stderr or result.stdout}", "raw": result.stdout + result.stderr}
|
| 474 |
except FileNotFoundError:
|
| 475 |
+
return {"error": "Ping komutu bulunamadı. PATH ayarınızı veya komutun konumunu kontrol edin."}
|
| 476 |
except subprocess.TimeoutExpired:
|
| 477 |
+
return {"error": "Ping isteği zaman aşımına uğradı"}
|
| 478 |
except Exception as e:
|
| 479 |
+
return {"error": f"Beklenmedik hata: {str(e)}"}
|
| 480 |
|
| 481 |
def scan_ports(ip):
|
|
|
|
| 482 |
if not NMAP_AVAILABLE:
|
| 483 |
+
return {"error": "Nmap kütüphanesi kurulu değil"}
|
| 484 |
|
| 485 |
try:
|
| 486 |
nm = nmap.PortScanner()
|
|
|
|
| 508 |
open_ports.append({
|
| 509 |
'port': port,
|
| 510 |
'state': port_info['state'],
|
| 511 |
+
'name': port_info.get('name', 'bilinmiyor'),
|
| 512 |
'product': port_info.get('product', ''),
|
| 513 |
'version': port_info.get('version', ''),
|
| 514 |
})
|
| 515 |
return {"ports": open_ports}
|
| 516 |
|
| 517 |
except nmap.PortScannerError as e:
|
| 518 |
+
logger.error(f"Nmap hatası: {str(e)}")
|
| 519 |
+
return {"error": f"Nmap hatası: {str(e)}"}
|
| 520 |
except Exception as e:
|
| 521 |
+
logger.error(f"Port tarama hatası: {str(e)}")
|
| 522 |
+
return {"error": f"Port tarama hatası: {str(e)}"}
|
| 523 |
|
| 524 |
def get_http_headers(host):
|
|
|
|
| 525 |
urls = [f"https://{host}", f"http://{host}"]
|
| 526 |
headers = {'User-Agent': 'Luminet/1.0 (Network Analysis Tool)'}
|
| 527 |
last_error = None
|
|
|
|
| 553 |
}
|
| 554 |
except requests.exceptions.RequestException as e:
|
| 555 |
last_error = e
|
| 556 |
+
logger.warning(f"HTTP başlık hatası: {str(e)}")
|
| 557 |
continue
|
| 558 |
|
| 559 |
+
return {"error": f"HTTP/HTTPS bağlantısı kurulamadı: {str(last_error)}"}
|
| 560 |
|
| 561 |
def is_public_ip(ip: str) -> bool:
|
| 562 |
"""Check if an IP address is public/routable."""
|
|
|
|
| 633 |
|
| 634 |
if response.status_code == 200:
|
| 635 |
results = response.json()
|
| 636 |
+
# Map results back to original IP list
|
| 637 |
result_map = {result["query"]: result for result in results if result.get("status") == "success"}
|
| 638 |
locations = []
|
| 639 |
for ip in ips:
|
|
|
|
| 657 |
except Exception as e:
|
| 658 |
logger.error(f"Batch location lookup failed: {str(e)}")
|
| 659 |
|
| 660 |
+
# Fallback to individual lookups if batch fails
|
| 661 |
return [get_location(ip) if is_public_ip(ip) else None for ip in ips]
|
| 662 |
|
| 663 |
def system_traceroute(ip: str, max_hops: int = 30, timeout: int = 60) -> List[Dict]:
|
| 664 |
"""Perform a system traceroute and return hops."""
|
| 665 |
+
# Determine OS and appropriate command
|
| 666 |
if os.name == "nt":
|
| 667 |
cmd = ["tracert", "-d", "-h", str(max_hops), "-w", "1000", ip]
|
| 668 |
else:
|
|
|
|
| 681 |
ttl = 1
|
| 682 |
|
| 683 |
for line in proc.stdout.splitlines():
|
| 684 |
+
# Skip header lines
|
| 685 |
if not line.strip() or line.startswith(("traceroute", "tracert", "Tracing")):
|
| 686 |
continue
|
| 687 |
|
| 688 |
+
# Parse line based on OS
|
| 689 |
if os.name == "nt":
|
| 690 |
+
# Windows tracert format
|
| 691 |
parts = line.strip().split()
|
| 692 |
if len(parts) >= 3 and parts[1].replace(".", "").isdigit():
|
| 693 |
hop_ip = parts[1]
|
| 694 |
hops.append({"ip": hop_ip, "ttl": ttl})
|
| 695 |
ttl += 1
|
| 696 |
else:
|
| 697 |
+
# Unix traceroute format
|
| 698 |
parts = line.strip().split()
|
| 699 |
if len(parts) >= 2 and parts[1] != "*":
|
| 700 |
hop_ip = parts[1]
|
| 701 |
+
# Handle cases where IP is in parentheses
|
| 702 |
if "(" in hop_ip and ")" in hop_ip:
|
| 703 |
hop_ip = hop_ip[hop_ip.find("(")+1:hop_ip.find(")")]
|
| 704 |
hops.append({"ip": hop_ip, "ttl": ttl})
|
|
|
|
| 718 |
|
| 719 |
def enriched_traceroute(target_ip: str, max_hops: int = 30) -> List[Dict]:
|
| 720 |
"""Perform traceroute with geolocation information for each hop."""
|
| 721 |
+
# First get all hops
|
| 722 |
hops = system_traceroute(target_ip, max_hops=max_hops)
|
| 723 |
|
| 724 |
if not hops:
|
| 725 |
return []
|
| 726 |
|
| 727 |
+
# Extract IPs for batch lookup
|
| 728 |
hop_ips = [hop["ip"] for hop in hops]
|
| 729 |
+
|
| 730 |
+
# Get locations in batch if possible
|
| 731 |
locations = get_locations_batch(hop_ips)
|
| 732 |
|
| 733 |
+
# Enrich hop data with location information
|
| 734 |
enriched_hops = []
|
| 735 |
for hop, location in zip(hops, locations):
|
| 736 |
enriched_hop = {
|
|
|
|
| 743 |
return enriched_hops
|
| 744 |
|
| 745 |
def get_asn_from_ip(ip):
|
|
|
|
| 746 |
try:
|
| 747 |
r = requests.get(f"{RIPESTAT_DATA_URL}prefix-overview/data.json?resource={ip}", timeout=5)
|
| 748 |
if r.status_code == 200:
|
|
|
|
| 752 |
return str(asns[0]["asn"])
|
| 753 |
return None
|
| 754 |
except Exception as e:
|
| 755 |
+
logger.error(f"ASN alınamadı: {str(e)}")
|
| 756 |
return None
|
| 757 |
|
| 758 |
+
def get_asn_path_locations(asn):
    """Count announced-prefix country codes for *asn* via the BGPView API.

    Returns a Counter mapping ISO country codes to how many of the ASN's
    announced IPv4/IPv6 prefixes are registered there; empty on any failure
    or non-200 response.
    """
    try:
        response = requests.get(f"https://api.bgpview.io/asn/{asn}/prefixes", timeout=10)
        if response.status_code != 200:
            return Counter()
        payload = response.json().get("data", {})
        all_prefixes = payload.get("ipv4_prefixes", []) + payload.get("ipv6_prefixes", [])
        return Counter(p.get("country_code") for p in all_prefixes if p.get("country_code"))
    except Exception as e:
        logger.error(f"BGPView ASN path hatası: {str(e)}")
        return Counter()
|
| 771 |
|
| 772 |
+
def lookup_coords_osm(country_code):
    """Geocode an ISO country code to (lat, lon) via OSM Nominatim.

    Resolves the code to a country name first, then asks Nominatim for its
    best match. Returns (None, None) when the code is empty, nothing is
    found, or any request/parse error occurs.
    """
    try:
        if not country_code:
            return None, None

        name = get_country_name(country_code)
        if not name:
            return None, None

        response = requests.get(
            OSM_NOMINATIM_URL,
            params={"q": name, "format": "json", "limit": 1},
            headers={"User-Agent": "Luminet/1.0"},
            timeout=5,
        )
        response.raise_for_status()
        hits = response.json()
        if not hits:
            return None, None
        top = hits[0]
        return float(top["lat"]), float(top["lon"])
    except Exception as e:
        logger.error(f"[OSM Lookup Hatası] {country_code}: {str(e)}")
        return None, None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 792 |
|
| 793 |
+
def get_asn_map_data(ip):
    """Build map points for the ASN that announces *ip*.

    Resolves the IP to its origin ASN, tallies the country codes of the
    ASN's announced prefixes, and geocodes the 15 most common countries.

    Returns (asn, points) where points is a list of dicts with
    loc/count/lat/lon/country_name; (None, []) when the ASN cannot be
    determined. Countries whose coordinates cannot be geocoded are skipped.
    """
    asn = get_asn_from_ip(ip)
    if not asn:
        return (None, [])

    location_counts = get_asn_path_locations(asn)
    map_points = []

    for country_code, count in location_counts.most_common(15):
        lat, lon = lookup_coords_osm(country_code)
        # Explicit None check: 0.0 is a valid coordinate (equator / prime
        # meridian) and would be dropped by a plain truthiness test.
        if lat is not None and lon is not None:
            map_points.append({
                "loc": country_code,
                "count": count,
                "lat": lat,
                "lon": lon,
                "country_name": get_country_name(country_code)
            })

    return (asn, map_points)
|
| 813 |
|
| 814 |
def parse_vcard(vcard_array):
|
|
|
|
| 815 |
if not isinstance(vcard_array, list) or len(vcard_array) < 2:
|
| 816 |
return {}
|
| 817 |
|
| 818 |
vcard_data = vcard_array[1]
|
| 819 |
contact = {
|
| 820 |
+
"name": "Bilinmiyor",
|
| 821 |
+
"org": "Bilinmiyor",
|
| 822 |
+
"email": "Bilinmiyor",
|
| 823 |
+
"address": "Bilinmiyor",
|
| 824 |
+
"tel": "Bilinmiyor"
|
| 825 |
}
|
| 826 |
|
| 827 |
for item in vcard_data:
|
|
|
|
| 846 |
|
| 847 |
return contact
|
| 848 |
|
| 849 |
+
def run_mtr_analysis(ip):
    """Run `mtr --json` against *ip* and return a structured hop report.

    Returns a dict that always contains "ip" and "timestamp"; on success it
    gains "hops" (per-hop loss/latency stats), "destination", "packet_loss"
    and "total_hops", and on failure an "error" plus diagnostic fields.
    Never raises: subprocess, parse and permission failures are mapped to
    user-facing (Turkish) error messages.
    """
    if not ip or not isinstance(ip, str):
        return {"error": "Geçersiz IP adresi veya hostname."}

    # Accept both raw IPs and hostnames: a ValueError here just means the
    # target is a hostname, which mtr resolves itself.
    try:
        ipaddress.ip_address(ip)
    except ValueError:
        pass

    command = [
        "mtr",
        "-r",            # report mode: run, then print once
        "-c", "10",      # 10 probe cycles per hop
        "--json",
        ip
    ]

    result = {"ip": ip, "timestamp": datetime.now().isoformat()}

    try:
        process = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            bufsize=1
        )

        stdout, stderr = process.communicate(timeout=60)

        if process.returncode != 0:
            # Map well-known mtr stderr snippets to friendlier messages.
            error_msg = stderr.strip()
            if "Name or service not known" in error_msg or "Temporary failure in name resolution" in error_msg:
                user_error = f"Hedef hostname çözümlenemedi: {ip}. Lütfen doğru bir IP veya hostname girin."
            elif "No such device" in error_msg or "Cannot find device" in error_msg:
                user_error = "Ağ arayüzü hatası: MTR gerekli ağ cihazını bulamadı."
            elif "Operation not permitted" in error_msg or "Permission denied" in error_msg:
                user_error = "İzin hatası: MTR çalıştırmak için yetki gerekli."
            else:
                user_error = "MTR analizi başarısız oldu."

            logger.error(f"MTR command failed for {ip}: {error_msg} (Exit code: {process.returncode})")
            result.update({
                "error": user_error,
                "details": error_msg,
                "raw_output": stdout,
                "raw_error": stderr
            })
            return result

        if not stdout.strip():
            logger.error(f"MTR returned empty output for {ip}.")
            result["error"] = "MTR boş sonuç döndürdü. Hedef erişilemiyor olabilir veya MTR çıktısı beklenenden farklı."
            result["raw_output"] = stdout
            result["raw_error"] = stderr
            return result

        try:
            mtr_data = json.loads(stdout)
        except json.JSONDecodeError as e:
            logger.error(f"MTR JSON decoding error for {ip}: {e}. Raw stdout: {stdout}")
            result.update({
                "error": "MTR çıktısı çözümlenemedi. Biçim hatası.",
                "details": f"JSON çözümleme hatası: {e}",
                "raw_output": stdout,
                "raw_error": stderr
            })
            return result

        report = mtr_data.get('report', {})
        hubs = report.get('hubs', [])

        cleaned_hops = []
        # FIX: snt must be initialized here — it is read after the loop for
        # the packet-loss fallback, and with an empty hop list the original
        # code raised NameError (masked as a generic unexpected error).
        snt = 0
        for idx, hop in enumerate(hubs, 1):
            host = hop.get('host', '???').strip()

            # Flag RFC1918 / private hops; hostnames simply stay non-private.
            is_private = False
            try:
                if host != '???':
                    ip_obj = ipaddress.ip_address(host)
                    is_private = ip_obj.is_private
            except ValueError:
                pass

            loss = float(hop.get('Loss%', 0.0))
            avg = float(hop.get('Avg', 0.0))
            best = float(hop.get('Best', 0.0))
            worst = float(hop.get('Worst', 0.0))
            last = float(hop.get('Last', 0.0))
            snt = int(hop.get('Snt', 0))

            cleaned_hops.append({
                'count': idx,
                'host': host,
                'loss': loss,
                'avg': avg,
                'best': best,
                'worst': worst,
                'last': last,
                'is_private': is_private,
                'packets_sent': snt
            })

        overall_packet_loss = float(report.get('loss', 0.0))
        # Fallback: no hops and no probes sent usually means the target is
        # unreachable; infer 100% loss when stderr says so.
        if not hubs and overall_packet_loss == 0.0 and snt == 0:
            overall_packet_loss = 100.0 if "No route to host" in stderr or "Host unreachable" in stderr else 0.0

        result.update({
            "hops": cleaned_hops,
            "destination": report.get('dst', ip).strip(),
            "packet_loss": overall_packet_loss,
            "total_hops": len(cleaned_hops)
        })

    except subprocess.TimeoutExpired:
        # Kill the stuck mtr and harvest whatever partial output exists.
        process.kill()
        stdout, stderr = process.communicate()
        error_msg = f"MTR analizi zaman aşımına uğradı ({ip})"
        logger.error(error_msg)
        result.update({
            "error": "MTR analizi zaman aşımına uğradı. Hedefe ulaşmak çok uzun sürdü veya ağ engellendi.",
            "details": "Analiz 60 saniyeden uzun sürdü.",
            "raw_output": stdout,
            "raw_error": stderr
        })

    except FileNotFoundError:
        logger.error("MTR komutu bulunamadı. Lütfen sisteminizde 'mtr' yüklü olduğundan emin olun.")
        result.update({
            "error": "MTR komutu bulunamadı. Sunucuda 'mtr' kurulu değil.",
            "details": "Lütfen sunucunuzda MTR'ı kurun."
        })
    except PermissionError as e:
        logger.error(f"MTR çalıştırmak için izin hatası: {e}")
        result.update({
            "error": "MTR çalıştırmak için yetki hatası.",
            "details": f"MTR komutunu çalıştırmak için gerekli izinler yok. Hata: {e}"
        })
    except Exception as e:
        logger.error(f"MTR beklenmedik hata için {ip}: {e}")
        result.update({
            "error": "Beklenmedik bir hata oluştu.",
            "details": str(e)
        })

    return result
|
| 995 |
+
|
| 996 |
+
def get_bgpview_prefix_details(ip):
    """
    Fetch details about the prefix an IP address belongs to from the
    BGPView API (prefix, name, description, country code).

    Returns a dict with those fields, or None when no prefix is found or
    the lookup fails.
    """
    def _prefix_length(entry):
        # FIX: compare CIDR lengths numerically — the original string
        # comparison ranked "/9" above "/10", and a missing/malformed
        # prefix raised IndexError inside max(). Unparsable entries
        # sort last.
        try:
            return int(entry.get("prefix", "").split('/')[1])
        except (IndexError, ValueError):
            return -1

    try:
        url = f"https://api.bgpview.io/ip/{ip}"
        response = robust_get_request(url, timeout=5)
        if response and response.status_code == 200:
            data = response.json().get("data", {})
            # Several covering prefixes may exist; take the most specific
            # (longest) one.
            longest_prefix = max(data.get("prefixes", []), key=_prefix_length, default=None)
            if longest_prefix:
                return {
                    "prefix": longest_prefix.get("prefix"),
                    "name": longest_prefix.get("name"),
                    "description": longest_prefix.get("description"),
                    "country_code": longest_prefix.get("country_code")
                }
        return None
    except Exception as e:
        logger.error(f"BGPView prefix detayları alınamadı: {str(e)}")
        return None
|
| 1019 |
+
|
| 1020 |
+
def get_peeringdb_info(asn):
    """Fetch basic network facts for *asn* from the PeeringDB API.

    Returns a dict with name, website, traffic type, announced prefix
    counts and peering policy, or None when the ASN is empty, unknown,
    or the request fails.
    """
    if not asn:
        return None
    try:
        response = robust_get_request(f"https://www.peeringdb.com/api/net?asn={asn}", timeout=7)
        if not response or response.status_code != 200:
            return None
        records = response.json().get("data", [])
        if not records:
            return None
        net = records[0]
        return {
            "name": net.get("name"),
            "website": net.get("website"),
            "traffic_type": net.get("info_traffic"),
            "prefix_count_ipv4": net.get("info_prefixes4"),
            "prefix_count_ipv6": net.get("info_prefixes6"),
            "policy": net.get("policy_general")
        }
    except Exception as e:
        logger.error(f"PeeringDB bilgisi alınamadı: {str(e)}")
        return None
|
| 1046 |
+
|
| 1047 |
def parse_rdap_response(data):
|
| 1048 |
+
"""
|
| 1049 |
+
Ham RDAP JSON yanıtını ayrıştırarak ön yüzde kullanılacak
|
| 1050 |
+
yapılandırılmış bir sözlük oluşturur.
|
| 1051 |
+
"""
|
| 1052 |
parsed_info = {
|
| 1053 |
"summary": {},
|
| 1054 |
"contacts": {
|
|
|
|
| 1059 |
"other": []
|
| 1060 |
},
|
| 1061 |
"details": {},
|
| 1062 |
+
"source_rir": "Bilinmiyor",
|
| 1063 |
}
|
| 1064 |
|
| 1065 |
+
# RIR kaynağını (RIPE, ARIN vb.) belirle
|
| 1066 |
if "port43" in data:
|
| 1067 |
port43_val = data["port43"].lower()
|
| 1068 |
rir_mapping = {
|
| 1069 |
"apnic": "APNIC", "lacnic": "LACNIC", "afrinic": "AfriNIC",
|
| 1070 |
"arin": "ARIN", "ripe": "RIPE NCC",
|
| 1071 |
}
|
| 1072 |
+
parsed_info["source_rir"] = next((v for k, v in rir_mapping.items() if k in port43_val), "Bilinmiyor")
|
| 1073 |
|
| 1074 |
+
# Temel özet bilgileri
|
| 1075 |
summary = {
|
| 1076 |
+
"name": data.get("name", "Bilinmiyor"),
|
| 1077 |
+
"handle": data.get("handle", "Bilinmiyor"),
|
| 1078 |
+
"country": data.get("country", "Bilinmiyor"),
|
| 1079 |
"ip_range": f"{data.get('startAddress', '')} - {data.get('endAddress', '')}",
|
| 1080 |
"asn_range": f"{data.get('startAutnum', '')} - {data.get('endAutnum', '')}",
|
| 1081 |
+
"type": data.get("type", "Bilinmiyor").title(),
|
| 1082 |
}
|
| 1083 |
if summary["ip_range"] == " - ": summary.pop("ip_range", None)
|
| 1084 |
if summary["asn_range"] == " - ": summary.pop("asn_range", None)
|
| 1085 |
parsed_info["summary"] = summary
|
| 1086 |
|
| 1087 |
+
# İletişim bilgilerini ayrıştır
|
| 1088 |
for entity in data.get("entities", []):
|
| 1089 |
vcard_array = entity.get("vcardArray")
|
| 1090 |
if not vcard_array:
|
|
|
|
| 1092 |
contact_details = parse_vcard(vcard_array)
|
| 1093 |
roles = entity.get("roles", [])
|
| 1094 |
|
| 1095 |
+
# Rolleri uygun kategorilere ata
|
| 1096 |
assigned = False
|
| 1097 |
for role in ["registrant", "administrative", "technical", "abuse"]:
|
| 1098 |
if role in roles:
|
|
|
|
| 1101 |
if not assigned:
|
| 1102 |
parsed_info["contacts"]["other"].append(contact_details)
|
| 1103 |
|
|
|
|
| 1104 |
parsed_info["details"] = {
|
| 1105 |
"events": data.get("events", []),
|
| 1106 |
"remarks": data.get("remarks", []),
|
| 1107 |
"links": data.get("links", []),
|
| 1108 |
"notices": data.get("notices", []),
|
| 1109 |
+
"object_class_name": data.get("objectClassName", "Bilinmiyor"),
|
| 1110 |
}
|
| 1111 |
|
| 1112 |
return parsed_info
|
| 1113 |
|
| 1114 |
+
def get_authoritative_rdap_url(query):
    """Resolve the authoritative RDAP endpoint URL for an IP address or ASN.

    First asks the IANA RDAP bootstrap service for a ``rel="related"``
    redirect link.  If that fails (network error, HTTP error, or a 501
    "not implemented" answer), probes each RIR's RDAP endpoint directly
    and returns the first one answering with HTTP 200.

    Args:
        query: An IPv4/IPv6 address, or an ASN optionally prefixed "AS".

    Returns:
        The authoritative RDAP URL as a string, or None if every source failed.
    """
    # Normalize: strip whitespace and drop the "AS" prefix on ASN queries.
    # NOTE(review): replace() removes "AS" anywhere in the string, not just
    # as a prefix — harmless for IP/ASN input, but worth confirming callers
    # never pass anything else.
    query = query.strip().upper().replace("AS", "")
    # IPv4 contains dots, IPv6 contains colons; bare digits mean an ASN.
    is_ip = "." in query or ":" in query
    rdap_type = "ip" if is_ip else "autnum"
    bootstrap_url = IANA_IP_BOOTSTRAP_URL if is_ip else IANA_ASN_BOOTSTRAP_URL

    try:
        r = robust_get_request(f"{bootstrap_url}{query}", timeout=10, headers={"Accept": "application/rdap+json"})
        # robust_get_request returns None once its retries are exhausted;
        # fall through to the RIR fallbacks instead of crashing on r.status_code.
        if r is None:
            raise requests.exceptions.RequestException("IANA RDAP isteği tüm denemelerde başarısız oldu")
        if r.status_code == 501:
            raise requests.exceptions.HTTPError("IANA RDAP not implemented", response=r)

        r.raise_for_status()
        iana_data = r.json()
        for link in iana_data.get("links", []):
            if link.get("rel") == "related":
                return link.get("href")
        logger.warning("IANA RDAP sonucu var ama yönlendirme linki bulunamadı.")
    except requests.exceptions.HTTPError as http_err:
        # response may be None on a synthesized HTTPError — guard before use.
        if http_err.response is not None and http_err.response.status_code == 501:
            logger.warning("IANA RDAP not implemented, trying fallback RIRs")
        else:
            logger.warning(f"IANA RDAP URL alınamadı: {str(http_err)}")
    except requests.exceptions.RequestException as e:
        logger.warning(f"IANA RDAP bağlantısı başarısız: {str(e)}")

    # Fallback: probe each RIR's RDAP service directly, in this order.
    fallback_rir_urls = {
        "ripe": f"https://rdap.db.ripe.net/{rdap_type}/{query}",
        "apnic": f"https://rdap.apnic.net/{rdap_type}/{query}",
        "lacnic": f"https://rdap.lacnic.net/rdap/{rdap_type}/{query}",
        "afrinic": f"https://rdap.afrinic.net/rdap/{rdap_type}/{query}",
        "arin": f"https://rdap.arin.net/registry/{rdap_type}/{query}"
    }

    for rir, url in fallback_rir_urls.items():
        try:
            r = robust_get_request(url, timeout=10, headers={"Accept": "application/rdap+json"})
            # r is None when retries were exhausted — try the next RIR.
            if r is not None and r.status_code == 200:
                logger.info(f"{rir.upper()} RDAP kaynağı kullanıldı: {url}")
                return url
        except requests.exceptions.RequestException:
            continue

    logger.warning(f"Tüm RDAP kaynakları başarısız oldu: {query}")
    return None
|
| 1158 |
+
|
| 1159 |
def get_additional_info_from_ripestat(query):
|
|
|
|
| 1160 |
additional_data = {}
|
| 1161 |
endpoints = {
|
| 1162 |
+
"announced-prefixes": "Anons Edilen Prefixler",
|
| 1163 |
+
"routing-status": "Yönlendirme Durumu",
|
| 1164 |
+
"bgp-state": "BGP Durumu"
|
| 1165 |
}
|
| 1166 |
|
| 1167 |
for endpoint, title in endpoints.items():
|
|
|
|
| 1172 |
if r.status_code == 200:
|
| 1173 |
additional_data[title] = r.json().get("data", {})
|
| 1174 |
else:
|
| 1175 |
+
additional_data[title] = {"error": f"HTTP {r.status_code}: Veri alınamadı"}
|
| 1176 |
except requests.exceptions.RequestException as e:
|
| 1177 |
+
additional_data[title] = {"error": f"İstek hatası: {str(e)}"}
|
| 1178 |
except Exception as e:
|
| 1179 |
+
additional_data[title] = {"error": f"Beklenmedik hata: {str(e)}"}
|
| 1180 |
|
| 1181 |
return additional_data
|
| 1182 |
|
| 1183 |
def get_ipinfo_details(ip):
|
|
|
|
| 1184 |
if not re.match(r"^\d{1,3}(?:\.\d{1,3}){3}$", ip) and ":" not in ip:
|
| 1185 |
return None
|
| 1186 |
|
|
|
|
| 1193 |
return data
|
| 1194 |
return None
|
| 1195 |
except requests.exceptions.RequestException as e:
|
| 1196 |
+
logger.warning(f"IPinfo bilgileri alınamadı: {str(e)}")
|
| 1197 |
return None
|
| 1198 |
|
| 1199 |
def analyze_ssl(hostname, port=443):
|
|
|
|
| 1200 |
try:
|
| 1201 |
context = ssl.create_default_context()
|
| 1202 |
context.check_hostname = False
|
|
|
|
| 1245 |
}
|
| 1246 |
|
| 1247 |
except ssl.SSLError as e:
|
| 1248 |
+
logger.error(f"SSL hatası ({hostname}:{port}): {str(e)}")
|
| 1249 |
+
return {"error": f"SSL hatası: {str(e)}"}
|
| 1250 |
except socket.timeout:
|
| 1251 |
+
logger.error(f"Zaman aşımı ({hostname}:{port})")
|
| 1252 |
+
return {"error": "Bağlantı zaman aşımı"}
|
| 1253 |
except Exception as e:
|
| 1254 |
+
logger.error(f"Beklenmedik hata ({hostname}:{port}): {str(e)}")
|
| 1255 |
+
return {"error": f"Beklenmedik hata: {str(e)}"}
|
| 1256 |
|
| 1257 |
def _get_alt_names(x509):
|
|
|
|
| 1258 |
alt_names = []
|
| 1259 |
for i in range(x509.get_extension_count()):
|
| 1260 |
ext = x509.get_extension(i)
|
|
|
|
| 1263 |
return alt_names
|
| 1264 |
|
| 1265 |
def generate_security_report(ipinfo_data, http_headers, port_scan, dnsbl_results, ip):
|
|
|
|
| 1266 |
report = {
|
| 1267 |
"proxy_vpn_tor": False,
|
| 1268 |
"anonymity_services": [],
|
|
|
|
| 1278 |
report["proxy_vpn_tor"] = True
|
| 1279 |
detected_str = ", ".join(report["anonymity_services"])
|
| 1280 |
report["security_issues"].append(
|
| 1281 |
+
f"Anonim ağ kullanımı tespit edildi: {detected_str}"
|
| 1282 |
)
|
| 1283 |
|
| 1284 |
if ipinfo_data:
|
|
|
|
| 1299 |
category_map = {
|
| 1300 |
'spam': 'Spam',
|
| 1301 |
'proxy_bot': 'Proxy/Bot',
|
| 1302 |
+
'anonymity': 'Anonim Servis'
|
| 1303 |
}
|
| 1304 |
|
| 1305 |
service_name = category_map.get(category, category.capitalize())
|
|
|
|
| 1313 |
open_critical = [p for p in report["open_ports"] if p["port"] in critical_ports]
|
| 1314 |
|
| 1315 |
if open_critical:
|
| 1316 |
+
report["security_issues"].append(f"{len(open_critical)} kritik port açık bulundu")
|
| 1317 |
|
| 1318 |
if http_headers and not http_headers.get("error") and "headers" in http_headers:
|
| 1319 |
headers = {k.lower(): v for k, v in http_headers["headers"].items()}
|
|
|
|
| 1336 |
if real_name not in headers
|
| 1337 |
]
|
| 1338 |
if missing_headers:
|
| 1339 |
+
report["security_issues"].append(f"{len(missing_headers)} güvenlik başlığı eksik")
|
| 1340 |
|
| 1341 |
return report
|
| 1342 |
|
| 1343 |
def check_anonymity_services(ip):
|
|
|
|
| 1344 |
anonymity_services = []
|
| 1345 |
proxy_lists = [
|
| 1346 |
'proxy.dnsbl.sorbs.net',
|
|
|
|
| 1378 |
'tornevall': 'VPN',
|
| 1379 |
'efnetrbl': 'IRC Proxy',
|
| 1380 |
'spamrats': 'Spam Bot',
|
| 1381 |
+
'blocklist': 'Hack Saldırısı',
|
| 1382 |
'dronebl': 'Zombie/Botnet'
|
| 1383 |
}
|
| 1384 |
|
|
|
|
| 1397 |
return anonymity_services
|
| 1398 |
|
| 1399 |
except Exception as e:
|
| 1400 |
+
logger.error(f"Anonimlik kontrol hatası: {str(e)}")
|
| 1401 |
return []
|
| 1402 |
|
| 1403 |
def get_ip_classification(ip):
|
|
|
|
| 1404 |
try:
|
| 1405 |
ip_obj = ipaddress.ip_address(ip)
|
| 1406 |
if ip_obj.is_private:
|
| 1407 |
+
return "Private", "Özel ağ kullanımı (LAN)"
|
| 1408 |
elif ip_obj.is_multicast:
|
| 1409 |
+
return "Multicast", "Çoklu yayın ağı"
|
| 1410 |
elif ip_obj.is_global:
|
| 1411 |
+
return "Public", "Genel internet"
|
| 1412 |
else:
|
| 1413 |
+
return "Special", "Özel kullanım"
|
| 1414 |
except ValueError:
|
| 1415 |
+
return "Invalid", "Geçersiz IP adresi"
|
| 1416 |
|
| 1417 |
def robust_get_request(url, retries=3, backoff_factor=0.5, **kwargs):
|
|
|
|
| 1418 |
headers = kwargs.get('headers', {})
|
| 1419 |
headers.setdefault('User-Agent', 'Luminet/1.0 (Network Analysis Tool)')
|
| 1420 |
kwargs['headers'] = headers
|
|
|
|
| 1431 |
time.sleep(wait_time)
|
| 1432 |
return None
|
| 1433 |
|
| 1434 |
+
# Rotalar
|
| 1435 |
@app.route("/", methods=["GET", "POST"])
|
| 1436 |
def index():
|
| 1437 |
+
# Başlangıç değişkenlerini tanımla
|
| 1438 |
query = request.form.get("query", "").strip() if request.method == "POST" else ""
|
| 1439 |
main_data, additional_data, ipinfo_data, raw_json_str, error = None, None, None, None, None
|
| 1440 |
+
traceroute_locations, asn_map_data, origin_asn = None, None, None
|
| 1441 |
reverse_dns, ping_data, port_scan, http_headers, ssl_info = None, None, None, None, None
|
| 1442 |
+
dnsbl_results, security_report, bgp_graph_data = None, None, None
|
| 1443 |
+
bgpview_prefix, mtr_data, peeringdb_data = None, None, None
|
| 1444 |
|
| 1445 |
+
# Yeni DNS değişkenleri
|
| 1446 |
dns_records, authoritative_dns, dnssec_status = None, None, None
|
| 1447 |
|
| 1448 |
if request.method == "POST" and query:
|
| 1449 |
try:
|
| 1450 |
+
# Alan adı veya IP'yi çözümle
|
| 1451 |
ip_to_query = query
|
| 1452 |
is_domain = False
|
| 1453 |
try:
|
|
|
|
| 1455 |
ip_to_query = addr_info[0][4][0]
|
| 1456 |
is_domain = True
|
| 1457 |
except socket.gaierror:
|
| 1458 |
+
pass # Bu bir IP adresi olabilir
|
| 1459 |
except Exception as e:
|
| 1460 |
+
logger.error(f"DNS çözümleme hatası: {str(e)}")
|
| 1461 |
|
| 1462 |
+
# RDAP sorgusu ile başla
|
| 1463 |
authoritative_url = get_authoritative_rdap_url(ip_to_query)
|
| 1464 |
if authoritative_url:
|
| 1465 |
try:
|
|
|
|
| 1472 |
main_data["summary"]["ip_class_desc"] = ip_class_desc
|
| 1473 |
raw_json_str = json.dumps(raw_data, indent=2, ensure_ascii=False)
|
| 1474 |
except requests.exceptions.HTTPError as http_err:
|
| 1475 |
+
error = f"API Hatası (Kod: {http_err.response.status_code}): '{query}' için kayıt bulunamadı."
|
| 1476 |
except Exception as e:
|
| 1477 |
+
error = f"RDAP verisi alınamadı: {str(e)}"
|
| 1478 |
else:
|
| 1479 |
+
error = f"Yetkili RDAP sunucusu bulunamadı: {query}"
|
| 1480 |
|
| 1481 |
if error:
|
| 1482 |
logger.error(error)
|
| 1483 |
|
| 1484 |
+
# RDAP'de hata yoksa, diğer analizleri paralel olarak yap
|
| 1485 |
if not error:
|
| 1486 |
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
|
| 1487 |
+
# Paralel çalışacak görevleri tanımla
|
| 1488 |
tasks = {
|
| 1489 |
'ipinfo': executor.submit(get_ipinfo_details, ip_to_query),
|
| 1490 |
'ripestat': executor.submit(get_additional_info_from_ripestat, ip_to_query),
|
| 1491 |
'reverse_dns': executor.submit(get_reverse_dns, ip_to_query),
|
| 1492 |
'ping': executor.submit(get_ping_latency, ip_to_query),
|
| 1493 |
'dnsbl': executor.submit(check_dnsbl, ip_to_query),
|
| 1494 |
+
'mtr': executor.submit(run_mtr_analysis, ip_to_query),
|
| 1495 |
+
'bgpview': executor.submit(get_bgpview_prefix_details, ip_to_query),
|
| 1496 |
'traceroute': executor.submit(enriched_traceroute, ip_to_query)
|
| 1497 |
}
|
| 1498 |
+
# Sonuçları topla
|
| 1499 |
results = {name: future.result() for name, future in tasks.items()}
|
| 1500 |
|
| 1501 |
ipinfo_data = results.get('ipinfo')
|
| 1502 |
additional_data = results.get('ripestat')
|
| 1503 |
reverse_dns = results.get('reverse_dns')
|
| 1504 |
ping_data = results.get('ping')
|
| 1505 |
+
mtr_data = results.get('mtr')
|
| 1506 |
dnsbl_results = results.get('dnsbl')
|
| 1507 |
+
bgpview_prefix = results.get('bgpview')
|
| 1508 |
traceroute_locations = results.get('traceroute')
|
| 1509 |
|
| 1510 |
+
# Alan adıysa DNS kayıtlarını al
|
| 1511 |
if is_domain:
|
| 1512 |
try:
|
| 1513 |
dns_records = get_dns_records(query)
|
| 1514 |
authoritative_dns = get_authoritative_dns(query)
|
| 1515 |
dnssec_status = check_dnssec(query)
|
| 1516 |
except Exception as e:
|
| 1517 |
+
logger.error(f"DNS sorgu hatası: {str(e)}")
|
| 1518 |
+
# Hata durumunda boş döndür
|
| 1519 |
+
dns_records, authoritative_dns, dnssec_status = {}, None, "Hata"
|
| 1520 |
|
| 1521 |
+
# ASN'yi ipinfo'dan al, yoksa RIPEstat'tan dene
|
| 1522 |
+
if ipinfo_data and ipinfo_data.get('asn'):
|
| 1523 |
+
origin_asn = ipinfo_data['asn'].get('asn', '').replace("AS", "")
|
| 1524 |
else:
|
| 1525 |
origin_asn = get_asn_from_ip(ip_to_query)
|
| 1526 |
+
|
| 1527 |
+
# ASN bilgisiyle PeeringDB'yi sorgula (bu sıralı olmalı)
|
| 1528 |
+
peeringdb_data = get_peeringdb_info(origin_asn)
|
| 1529 |
|
| 1530 |
+
# Sıralı çalışması gereken diğer görevler
|
| 1531 |
target_host = query if is_domain else ip_to_query
|
| 1532 |
http_headers = get_http_headers(target_host)
|
| 1533 |
if http_headers and not http_headers.get("error"):
|
| 1534 |
ssl_info = analyze_ssl(target_host)
|
| 1535 |
|
|
|
|
| 1536 |
if NMAP_AVAILABLE:
|
| 1537 |
port_scan = scan_ports(ip_to_query)
|
| 1538 |
else:
|
| 1539 |
+
port_scan = {"error": "Nmap kurulu değil"}
|
| 1540 |
+
|
| 1541 |
+
_, asn_map_data = get_asn_map_data(ip_to_query)
|
| 1542 |
|
|
|
|
| 1543 |
security_report = generate_security_report(
|
| 1544 |
ipinfo_data, http_headers, port_scan, dnsbl_results, ip_to_query
|
| 1545 |
)
|
| 1546 |
|
| 1547 |
+
if additional_data and 'BGP Durumu' in additional_data:
|
| 1548 |
+
bgp_graph_data = process_bgp_data_for_d3(additional_data['BGP Durumu'], origin_asn)
|
| 1549 |
+
|
| 1550 |
except requests.exceptions.ConnectionError as conn_err:
|
| 1551 |
+
error = f"Bağlantı hatası: {str(conn_err)}"
|
| 1552 |
logger.error(error)
|
| 1553 |
except Exception as e:
|
| 1554 |
+
error = f"Beklenmedik bir hata oluştu: {str(e)}"
|
| 1555 |
+
logger.error(f"Ana işlem hatası: {str(e)}", exc_info=True)
|
| 1556 |
|
| 1557 |
elif request.method == "POST" and not query:
|
| 1558 |
+
error = "Lütfen bir IP adresi veya alan adı girin."
|
| 1559 |
|
| 1560 |
+
# Tüm toplanan verileri şablona gönder
|
| 1561 |
return render_template(
|
| 1562 |
"index.html",
|
| 1563 |
query=query,
|
|
|
|
| 1572 |
port_scan=port_scan,
|
| 1573 |
http_headers=http_headers,
|
| 1574 |
ssl_info=ssl_info,
|
|
|
|
|
|
|
| 1575 |
traceroute_locations=traceroute_locations,
|
| 1576 |
+
asn_map_data=asn_map_data,
|
| 1577 |
+
security_report=security_report,
|
| 1578 |
+
bgp_graph_data=bgp_graph_data,
|
| 1579 |
+
origin_asn=origin_asn,
|
| 1580 |
+
bgpview_prefix=bgpview_prefix,
|
| 1581 |
+
mtr_data=mtr_data,
|
| 1582 |
+
peeringdb_data=peeringdb_data,
|
| 1583 |
dns_records=dns_records,
|
| 1584 |
authoritative_dns=authoritative_dns,
|
| 1585 |
+
dnssec_status=dnssec_status
|
|
|
|
| 1586 |
)
|
| 1587 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1588 |
# Script entry point: serve on all interfaces, port taken from the PORT
# environment variable (default 7860); debug mode stays off.
if __name__ == "__main__":
    listen_port = int(os.environ.get("PORT", 7860))
    app.run(host="0.0.0.0", port=listen_port, debug=False)
|