Update main.py
Browse files
main.py
CHANGED
|
@@ -607,6 +607,331 @@ def get_asn_from_ip(ip):
|
|
| 607 |
logger.error(f"ASN lookup failed: {str(e)}")
|
| 608 |
return None
|
| 609 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 610 |
def get_authoritative_rdap_url(query):
|
| 611 |
query = query.strip().upper().replace("AS", "")
|
| 612 |
is_ip = "." in query or ":" in query
|
|
@@ -1083,6 +1408,8 @@ def index():
|
|
| 1083 |
'reverse_dns': executor.submit(get_reverse_dns, ip_to_query),
|
| 1084 |
'ping': executor.submit(get_ping_latency, ip_to_query),
|
| 1085 |
'dnsbl': executor.submit(check_dnsbl, ip_to_query),
|
|
|
|
|
|
|
| 1086 |
'traceroute': executor.submit(enriched_traceroute, ip_to_query)
|
| 1087 |
}
|
| 1088 |
results = {name: future.result() for name, future in tasks.items()}
|
|
@@ -1091,7 +1418,9 @@ def index():
|
|
| 1091 |
additional_data = results.get('ripestat')
|
| 1092 |
reverse_dns = results.get('reverse_dns')
|
| 1093 |
ping_data = results.get('ping')
|
|
|
|
| 1094 |
dnsbl_results = results.get('dnsbl')
|
|
|
|
| 1095 |
traceroute_locations = results.get('traceroute')
|
| 1096 |
|
| 1097 |
# DNS records for domains
|
|
@@ -1109,6 +1438,9 @@ def index():
|
|
| 1109 |
origin_asn = ipinfo_data['asn'].get('asn', '').replace("AS", "")
|
| 1110 |
else:
|
| 1111 |
origin_asn = get_asn_from_ip(ip_to_query)
|
|
|
|
|
|
|
|
|
|
| 1112 |
|
| 1113 |
# HTTP and SSL analysis
|
| 1114 |
target_host = query if is_domain else ip_to_query
|
|
@@ -1122,11 +1454,18 @@ def index():
|
|
| 1122 |
else:
|
| 1123 |
port_scan = {"error": "Nmap not installed"}
|
| 1124 |
|
|
|
|
|
|
|
|
|
|
| 1125 |
# Security report
|
| 1126 |
security_report = generate_security_report(
|
| 1127 |
ipinfo_data, http_headers, port_scan, dnsbl_results, ip_to_query
|
| 1128 |
)
|
| 1129 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1130 |
except requests.exceptions.ConnectionError as conn_err:
|
| 1131 |
error = f"Connection error: {str(conn_err)}"
|
| 1132 |
logger.error(error)
|
|
@@ -1154,10 +1493,15 @@ def index():
|
|
| 1154 |
dnsbl_results=dnsbl_results,
|
| 1155 |
security_report=security_report,
|
| 1156 |
traceroute_locations=traceroute_locations,
|
|
|
|
|
|
|
| 1157 |
dns_records=dns_records,
|
| 1158 |
authoritative_dns=authoritative_dns,
|
| 1159 |
dnssec_status=dnssec_status,
|
| 1160 |
-
origin_asn=origin_asn
|
|
|
|
|
|
|
|
|
|
| 1161 |
)
|
| 1162 |
|
| 1163 |
# API endpoint for external access
|
|
|
|
| 607 |
logger.error(f"ASN lookup failed: {str(e)}")
|
| 608 |
return None
|
| 609 |
|
| 610 |
+
def get_asn_path_locations(asn):
    """Count announced-prefix country codes for an ASN via the BGPView API.

    Returns a Counter mapping ISO country code -> number of announced
    prefixes (IPv4 + IPv6 combined); an empty Counter on a non-200
    response or any network/parsing failure.
    """
    try:
        resp = requests.get(f"https://api.bgpview.io/asn/{asn}/prefixes", timeout=10)
        if resp.status_code != 200:
            return Counter()
        payload = resp.json().get("data", {})
        all_prefixes = payload.get("ipv4_prefixes", []) + payload.get("ipv6_prefixes", [])
        return Counter(
            prefix.get("country_code")
            for prefix in all_prefixes
            if prefix.get("country_code")
        )
    except Exception as e:
        logger.error(f"BGPView ASN path error: {str(e)}")
        return Counter()
|
| 623 |
+
|
| 624 |
+
def lookup_coords_osm(country_code):
    """Geocode a country code to (lat, lon) via OSM Nominatim.

    The code is first resolved to a country name with get_country_name;
    returns (None, None) when the code is empty, unknown, the service
    returns no hits, or the request fails.
    """
    try:
        if not country_code:
            return None, None

        name = get_country_name(country_code)
        if not name:
            return None, None

        response = requests.get(
            OSM_NOMINATIM_URL,
            params={"q": name, "format": "json", "limit": 1},
            headers={"User-Agent": "Luminet/1.0 (Network Analysis Tool)"},
            timeout=5,
        )
        response.raise_for_status()
        hits = response.json()
        if not hits:
            return None, None
        top = hits[0]
        return float(top["lat"]), float(top["lon"])
    except Exception as e:
        logger.error(f"[OSM Lookup Error] {country_code}: {str(e)}")
        return None, None
|
| 644 |
+
|
| 645 |
+
def get_asn_map_data(ip):
    """Build map-marker data for the ASN that announces *ip*.

    Returns (asn, points) where points is a list of dicts carrying the
    country code, prefix count, coordinates and display name for up to
    the 15 most common announcement countries. Returns (None, []) when
    the ASN cannot be determined.
    """
    asn = get_asn_from_ip(ip)
    if not asn:
        return (None, [])

    location_counts = get_asn_path_locations(asn)
    map_points = []

    for country_code, count in location_counts.most_common(15):
        lat, lon = lookup_coords_osm(country_code)
        # BUGFIX: was `if lat and lon:` — 0.0 is a valid latitude/longitude
        # (equator / prime meridian) and must not be discarded as falsy.
        if lat is not None and lon is not None:
            map_points.append({
                "loc": country_code,
                "count": count,
                "lat": lat,
                "lon": lon,
                "country_name": get_country_name(country_code)
            })

    return (asn, map_points)
|
| 665 |
+
|
| 666 |
+
def get_bgpview_prefix_details(ip):
    """
    BGPView API to get detailed prefix information for an IP address.

    Returns a dict with the most specific (longest-mask) covering prefix
    and its registry metadata, or None on failure.
    """
    def _prefix_length(entry):
        # BUGFIX: the original compared the mask as a STRING ("/9" sorted
        # after "/24") and raised IndexError on a mask-less prefix. Use the
        # numeric CIDR length; malformed entries sort lowest.
        cidr = entry.get("prefix", "")
        try:
            return int(cidr.split('/')[1])
        except (IndexError, ValueError):
            return -1

    try:
        url = f"https://api.bgpview.io/ip/{ip}"
        response = robust_get_request(url, timeout=5)
        if response and response.status_code == 200:
            data = response.json().get("data", {})
            # Get the most specific prefix (longest mask)
            longest_prefix = max(data.get("prefixes", []), key=_prefix_length, default=None)
            if longest_prefix:
                return {
                    "prefix": longest_prefix.get("prefix"),
                    "name": longest_prefix.get("name"),
                    "description": longest_prefix.get("description"),
                    "country_code": longest_prefix.get("country_code")
                }
        return None
    except Exception as e:
        logger.error(f"BGPView prefix details failed: {str(e)}")
        return None
|
| 688 |
+
|
| 689 |
+
def get_peeringdb_info(asn):
    """
    PeeringDB API to get ASN information.

    Returns a dict of network metadata for the ASN, or None when the ASN
    is falsy, unknown to PeeringDB, or the request fails.
    """
    if not asn:
        return None
    try:
        response = robust_get_request(f"https://www.peeringdb.com/api/net?asn={asn}", timeout=7)
        if not response or response.status_code != 200:
            return None
        records = response.json().get("data", [])
        if not records:
            return None
        net = records[0]
        # Map our output keys to the PeeringDB "net" object's field names.
        field_map = {
            "name": "name",
            "website": "website",
            "traffic_type": "info_traffic",
            "prefix_count_ipv4": "info_prefixes4",
            "prefix_count_ipv6": "info_prefixes6",
            "policy": "policy_general",
        }
        return {out_key: net.get(api_key) for out_key, api_key in field_map.items()}
    except Exception as e:
        logger.error(f"PeeringDB info failed: {str(e)}")
        return None
|
| 714 |
+
|
| 715 |
+
def run_mtr_analysis(ip):
    """Run an MTR (My TraceRoute) report against *ip* and parse its JSON output.

    Executes ``mtr -r -c 10 --json`` via subprocess and returns a dict with
    the target, an ISO timestamp, per-hop statistics and overall packet
    loss. On any failure the dict carries an ``error`` key (plus
    ``details`` / raw output where available) instead of hop data.
    """
    if not ip or not isinstance(ip, str):
        return {"error": "Invalid IP address or hostname."}

    # Accept either an IP literal or a hostname: ValueError here just means
    # the target is not an IP literal, so let mtr resolve the name itself.
    try:
        ipaddress.ip_address(ip)
    except ValueError:
        pass

    command = [
        "mtr",
        "-r",          # report mode: run a fixed number of cycles, then print
        "-c", "10",    # 10 probe cycles per hop
        "--json",
        ip
    ]

    result = {"ip": ip, "timestamp": datetime.now().isoformat()}

    try:
        process = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            bufsize=1
        )

        stdout, stderr = process.communicate(timeout=60)

        if process.returncode != 0:
            error_msg = stderr.strip()
            # Map common mtr failure modes to actionable user-facing messages.
            if "Name or service not known" in error_msg or "Temporary failure in name resolution" in error_msg:
                user_error = f"Target hostname could not be resolved: {ip}. Please enter a valid IP or hostname."
            elif "No such device" in error_msg or "Cannot find device" in error_msg:
                user_error = "Network interface error: MTR could not find the required network device."
            elif "Operation not permitted" in error_msg or "Permission denied" in error_msg:
                user_error = "Permission error: MTR requires 'sudo' privileges."
            else:
                user_error = "MTR analysis failed."

            logger.error(f"MTR command failed for {ip}: {error_msg} (Exit code: {process.returncode})")
            result.update({
                "error": user_error,
                "details": error_msg,
                "raw_output": stdout,
                "raw_error": stderr
            })
            return result

        if not stdout.strip():
            logger.error(f"MTR returned empty output for {ip}.")
            result["error"] = "MTR returned empty results. Target may be unreachable."
            result["raw_output"] = stdout
            result["raw_error"] = stderr
            return result

        try:
            mtr_data = json.loads(stdout)
        except json.JSONDecodeError as e:
            logger.error(f"MTR JSON decoding error for {ip}: {e}. Raw stdout: {stdout}")
            result.update({
                "error": "MTR output could not be parsed. Format error.",
                "details": f"JSON parsing error: {e}",
                "raw_output": stdout,
                "raw_error": stderr
            })
            return result

        report = mtr_data.get('report', {})
        hubs = report.get('hubs', [])

        cleaned_hops = []
        # BUGFIX: `snt` was only bound inside the loop; with zero hubs the
        # `snt == 0` check below raised NameError (surfacing as the generic
        # "unexpected error"). Initialize it before the loop.
        snt = 0
        for idx, hop in enumerate(hubs, 1):
            host = hop.get('host', '???').strip()

            is_private = False
            try:
                if host != '???':
                    ip_obj = ipaddress.ip_address(host)
                    is_private = ip_obj.is_private
            except ValueError:
                pass  # hop reported a hostname rather than an IP literal

            loss = float(hop.get('Loss%', 0.0))
            avg = float(hop.get('Avg', 0.0))
            best = float(hop.get('Best', 0.0))
            worst = float(hop.get('Worst', 0.0))
            last = float(hop.get('Last', 0.0))
            snt = int(hop.get('Snt', 0))

            cleaned_hops.append({
                'count': idx,
                'host': host,
                'loss': loss,
                'avg': avg,
                'best': best,
                'worst': worst,
                'last': last,
                'is_private': is_private,
                'packets_sent': snt
            })

        overall_packet_loss = float(report.get('loss', 0.0))
        if not hubs and overall_packet_loss == 0.0 and snt == 0:
            # No hops at all: infer total loss only when stderr says the host is unreachable.
            overall_packet_loss = 100.0 if "No route to host" in stderr or "Host unreachable" in stderr else 0.0

        result.update({
            "hops": cleaned_hops,
            "destination": report.get('dst', ip).strip(),
            "packet_loss": overall_packet_loss,
            "total_hops": len(cleaned_hops)
        })

    except subprocess.TimeoutExpired:
        process.kill()
        stdout, stderr = process.communicate()
        error_msg = f"MTR analysis timed out ({ip})"
        logger.error(error_msg)
        result.update({
            "error": "MTR analysis timed out. Target took too long to reach or network blocked.",
            "details": "Analysis took longer than 60 seconds.",
            "raw_output": stdout,
            "raw_error": stderr
        })

    except FileNotFoundError:
        logger.error("MTR command not found. Please ensure 'mtr' is installed on your system.")
        result.update({
            "error": "MTR command not found. 'mtr' is not installed on server.",
            "details": "Please install MTR with 'sudo apt-get install mtr' or similar command."
        })
    except PermissionError as e:
        logger.error(f"Permission error running MTR: {e}")
        result.update({
            "error": "Permission error running MTR.",
            "details": f"Insufficient permissions to run MTR command. Error: {e}. Check 'sudo' configuration."
        })
    except Exception as e:
        logger.error(f"MTR unexpected error for {ip}: {e}")
        result.update({
            "error": "An unexpected error occurred.",
            "details": str(e)
        })

    return result
|
| 861 |
+
|
| 862 |
+
def process_bgp_data_for_d3(bgp_data, target_asn_str=None):
    """Convert BGP state entries into a D3-ready node/link graph.

    Every AS path becomes a chain of links sharing one deterministic
    (md5-derived) color; parallel links between the same AS pair are
    numbered via ``link_index`` / ``total_links`` so the front end can
    fan them out. Returns None when there is nothing to draw.
    """
    if not bgp_data or not bgp_data.get('bgp_state'):
        return None

    def _hash_color(key, offset):
        # Deterministic color from a slice of the key's md5 hex digest.
        digest = hashlib.md5(key.encode()).hexdigest()
        return f"#{digest[offset:offset + 6]}"

    nodes = {}
    raw_links = []
    source_asns = set()
    target_asn = int(target_asn_str) if target_asn_str and target_asn_str.isdigit() else None

    for entry in bgp_data.get('bgp_state', []):
        path = entry.get('path', [])
        if not path:
            continue

        source_asns.add(path[0])
        path_color = _hash_color('-'.join(map(str, path)), 0)

        path_details = {
            "community": entry.get("community", []),
            "full_path": entry.get("path", []),
            "source_id": entry.get("source_id", "Unknown"),
            "target_prefix": entry.get("target_prefix", "Unknown"),
        }

        for asn in path:
            nodes.setdefault(asn, {
                "id": str(asn),
                "label": f"AS{asn}",
                "color": _hash_color(str(asn), 6),
                "node_type": "transit",
            })

        for hop_a, hop_b in zip(path, path[1:]):
            raw_links.append({
                "source": str(hop_a),
                "target": str(hop_b),
                "color": path_color,
                **path_details,
            })

    # NOTE(review): nodes are created with a "node_type" key but the roles
    # below are stored under "type" — preserved as-is; confirm which key the
    # front-end template actually reads.
    for asn, node_data in nodes.items():
        if asn == target_asn:
            node_data["type"] = "target"
        elif asn in source_asns:
            node_data["type"] = "source"

    if not raw_links:
        return None

    # First pass: count links per undirected AS pair.
    pair_totals = defaultdict(int)
    for link in raw_links:
        pair_totals[tuple(sorted((link['source'], link['target'])))] += 1

    # Second pass: annotate each link with its index within its pair group.
    processed_links = []
    seen_so_far = defaultdict(int)
    for link in raw_links:
        pair = tuple(sorted((link['source'], link['target'])))
        annotated = dict(link)
        annotated['total_links'] = pair_totals[pair]
        annotated['link_index'] = seen_so_far[pair]
        seen_so_far[pair] += 1
        processed_links.append(annotated)

    return {"nodes": list(nodes.values()), "links": processed_links}
|
| 934 |
+
|
| 935 |
def get_authoritative_rdap_url(query):
|
| 936 |
query = query.strip().upper().replace("AS", "")
|
| 937 |
is_ip = "." in query or ":" in query
|
|
|
|
| 1408 |
'reverse_dns': executor.submit(get_reverse_dns, ip_to_query),
|
| 1409 |
'ping': executor.submit(get_ping_latency, ip_to_query),
|
| 1410 |
'dnsbl': executor.submit(check_dnsbl, ip_to_query),
|
| 1411 |
+
'mtr': executor.submit(run_mtr_analysis, ip_to_query),
|
| 1412 |
+
'bgpview': executor.submit(get_bgpview_prefix_details, ip_to_query),
|
| 1413 |
'traceroute': executor.submit(enriched_traceroute, ip_to_query)
|
| 1414 |
}
|
| 1415 |
results = {name: future.result() for name, future in tasks.items()}
|
|
|
|
| 1418 |
additional_data = results.get('ripestat')
|
| 1419 |
reverse_dns = results.get('reverse_dns')
|
| 1420 |
ping_data = results.get('ping')
|
| 1421 |
+
mtr_data = results.get('mtr')
|
| 1422 |
dnsbl_results = results.get('dnsbl')
|
| 1423 |
+
bgpview_prefix = results.get('bgpview')
|
| 1424 |
traceroute_locations = results.get('traceroute')
|
| 1425 |
|
| 1426 |
# DNS records for domains
|
|
|
|
| 1438 |
origin_asn = ipinfo_data['asn'].get('asn', '').replace("AS", "")
|
| 1439 |
else:
|
| 1440 |
origin_asn = get_asn_from_ip(ip_to_query)
|
| 1441 |
+
|
| 1442 |
+
# ASN info with PeeringDB (sequential as it depends on ASN)
|
| 1443 |
+
peeringdb_data = get_peeringdb_info(origin_asn)
|
| 1444 |
|
| 1445 |
# HTTP and SSL analysis
|
| 1446 |
target_host = query if is_domain else ip_to_query
|
|
|
|
| 1454 |
else:
|
| 1455 |
port_scan = {"error": "Nmap not installed"}
|
| 1456 |
|
| 1457 |
+
# ASN map data
|
| 1458 |
+
_, asn_map_data = get_asn_map_data(ip_to_query)
|
| 1459 |
+
|
| 1460 |
# Security report
|
| 1461 |
security_report = generate_security_report(
|
| 1462 |
ipinfo_data, http_headers, port_scan, dnsbl_results, ip_to_query
|
| 1463 |
)
|
| 1464 |
|
| 1465 |
+
# BGP graph data
|
| 1466 |
+
if additional_data and 'BGP State' in additional_data:
|
| 1467 |
+
bgp_graph_data = process_bgp_data_for_d3(additional_data['BGP State'], origin_asn)
|
| 1468 |
+
|
| 1469 |
except requests.exceptions.ConnectionError as conn_err:
|
| 1470 |
error = f"Connection error: {str(conn_err)}"
|
| 1471 |
logger.error(error)
|
|
|
|
| 1493 |
dnsbl_results=dnsbl_results,
|
| 1494 |
security_report=security_report,
|
| 1495 |
traceroute_locations=traceroute_locations,
|
| 1496 |
+
asn_map_data=asn_map_data,
|
| 1497 |
+
bgp_graph_data=bgp_graph_data,
|
| 1498 |
dns_records=dns_records,
|
| 1499 |
authoritative_dns=authoritative_dns,
|
| 1500 |
dnssec_status=dnssec_status,
|
| 1501 |
+
origin_asn=origin_asn,
|
| 1502 |
+
bgpview_prefix=bgpview_prefix,
|
| 1503 |
+
mtr_data=mtr_data,
|
| 1504 |
+
peeringdb_data=peeringdb_data
|
| 1505 |
)
|
| 1506 |
|
| 1507 |
# API endpoint for external access
|