diff --git "a/src/streamlit_app.py" "b/src/streamlit_app.py"
--- "a/src/streamlit_app.py"
+++ "b/src/streamlit_app.py"
@@ -21,13 +21,34 @@ from streamlit_folium import st_folium
warnings.filterwarnings("ignore")
-# Solusi Hugging Face Spaces: arahkan ke direktori yang writable
-os.environ["HOME"] = "/tmp"
-os.environ["MPLCONFIGDIR"] = "/tmp/mpl_config"
-os.makedirs("/tmp/.streamlit", exist_ok=True)
-os.makedirs("/tmp/mpl_config", exist_ok=True)
+# === FIXED ENVIRONMENT SETUP FOR HUGGING FACE SPACES ===
+# Set up writable directories for Hugging Face Spaces
+TEMP_DIR = "/tmp"
+os.environ["HOME"] = TEMP_DIR
+os.environ["MPLCONFIGDIR"] = f"{TEMP_DIR}/matplotlib"
+os.environ["STREAMLIT_CONFIG_DIR"] = f"{TEMP_DIR}/.streamlit"
+
+# Create necessary directories
+for directory in [
+ f"{TEMP_DIR}/.streamlit",
+ f"{TEMP_DIR}/matplotlib",
+ f"{TEMP_DIR}/.config",
+ f"{TEMP_DIR}/.cache",
+]:
+ try:
+ os.makedirs(directory, exist_ok=True)
+ except Exception as e:
+ st.warning(f"Could not create directory {directory}: {e}")
+
+# Set matplotlib backend to Agg (non-interactive) for server environment
+try:
+ import matplotlib
+
+ matplotlib.use("Agg")
+except Exception as e:
+ st.warning(f"Matplotlib configuration warning: {e}")
-# Konfigurasi halaman
+# === STREAMLIT CONFIGURATION ===
st.set_page_config(
page_title="Analisis Jaringan Listrik DIY",
page_icon="⚡",
@@ -35,7 +56,7 @@ st.set_page_config(
initial_sidebar_state="expanded",
)
-# CSS untuk styling
+# === CSS STYLING ===
st.markdown(
"""
""",
unsafe_allow_html=True,
)
+# === UTILITY FUNCTIONS ===
@st.cache_data
-def process_uploaded_data(uploaded_file):
- """Proses data dari file ZIP yang diupload"""
+def safe_file_processing(uploaded_file):
+ """Safely process uploaded file with error handling"""
try:
- # Ekstrak file zip
- with zipfile.ZipFile(uploaded_file) as zip_file:
- # Buat temporary directory
- with tempfile.TemporaryDirectory() as temp_dir:
+ # Create temporary directory for file processing
+ with tempfile.TemporaryDirectory() as temp_dir:
+ # Save uploaded file
+ temp_file_path = os.path.join(temp_dir, uploaded_file.name)
+ with open(temp_file_path, "wb") as f:
+ f.write(uploaded_file.getvalue())
+
+ # Extract and process ZIP
+ with zipfile.ZipFile(temp_file_path) as zip_file:
zip_file.extractall(temp_dir)
- # Cari file shapefile
+ # Find shapefile
shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
if not shp_files:
raise ValueError("File shapefile tidak ditemukan dalam ZIP")
- # Baca shapefile
+ # Read shapefile
shp_path = os.path.join(temp_dir, shp_files[0])
gdf = gpd.read_file(shp_path)
return gdf
+
except Exception as e:
st.error(f"Error memproses file: {str(e)}")
return None
@st.cache_data
-def load_local_zip_data(zip_path):
- """Load data dari file ZIP lokal"""
+def safe_url_download(data_url):
+ """Safely download data from URL with timeout and error handling"""
try:
- with zipfile.ZipFile(zip_path, "r") as zip_file:
+ headers = {
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
+ }
+
+ with st.spinner("Mengunduh data dari server..."):
+ response = requests.get(data_url, timeout=60, headers=headers)
+ response.raise_for_status()
+
with tempfile.TemporaryDirectory() as temp_dir:
- zip_file.extractall(temp_dir)
+ with zipfile.ZipFile(io.BytesIO(response.content)) as zip_file:
+ zip_file.extractall(temp_dir)
- # Cari file shapefile
- shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
- if not shp_files:
- raise ValueError("File shapefile tidak ditemukan dalam ZIP")
+ shp_files = [f for f in os.listdir(temp_dir) if f.endswith(".shp")]
+ if not shp_files:
+ raise ValueError(
+ "File shapefile tidak ditemukan dalam download"
+ )
- # Baca shapefile
- shp_path = os.path.join(temp_dir, shp_files[0])
- gdf = gpd.read_file(shp_path)
+ shp_path = os.path.join(temp_dir, shp_files[0])
+ gdf = gpd.read_file(shp_path)
+ return gdf
- return gdf
+ except requests.exceptions.Timeout:
+ st.error("⏱️ Timeout: Server terlalu lama merespons")
+ return None
+ except requests.exceptions.ConnectionError:
+ st.error("🌐 Error: Tidak dapat terhubung ke server")
+ return None
except Exception as e:
- st.error(f"Error memuat file lokal: {str(e)}")
+ st.error(f"Error mengunduh data: {str(e)}")
return None
def create_network_graph(gdf):
"""Membuat graf jaringan dari data geografis dengan penanganan yang lebih baik"""
- G = nx.Graph()
-
- # Data sudah dalam UTM Zone 49S (EPSG:32749)
- gdf_utm = gdf.copy()
- if gdf.crs != "EPSG:32749":
- gdf_utm = gdf.to_crs("EPSG:32749")
-
- # Dictionary untuk menyimpan koordinat ke node ID
- coord_to_node = {}
- node_counter = 0
- edges = []
- line_segments = []
-
- # Tolerance untuk menggabungkan koordinat yang sangat dekat (dalam meter)
- tolerance = 100.0 # 10 meter tolerance
-
- def get_or_create_node(coord):
- """Dapatkan node ID untuk koordinat, atau buat baru jika belum ada"""
- nonlocal node_counter
-
- # Cari node yang sudah ada dalam tolerance
- for existing_coord, node_id in coord_to_node.items():
- if (
- abs(existing_coord[0] - coord[0]) < tolerance
- and abs(existing_coord[1] - coord[1]) < tolerance
- ):
- return node_id
-
- # Buat node baru
- coord_to_node[coord] = node_counter
- node_counter += 1
- return node_counter - 1
-
- for idx, row in gdf_utm.iterrows():
- geom = row.geometry
- line_name = row.get("nama", f"Line_{idx}")
- line_id = row.get("id", idx)
-
- # Handle MultiLineString dan LineString
- if geom.geom_type == "MultiLineString":
- # Pecah MultiLineString menjadi LineString individual
- for i, line in enumerate(geom.geoms):
- coords = list(line.coords)
+ try:
+ G = nx.Graph()
+
+ # Data sudah dalam UTM Zone 49S (EPSG:32749)
+ gdf_utm = gdf.copy()
+ if gdf.crs != "EPSG:32749":
+ gdf_utm = gdf.to_crs("EPSG:32749")
+
+ # Dictionary untuk menyimpan koordinat ke node ID
+ coord_to_node = {}
+ node_counter = 0
+ edges = []
+ line_segments = []
+
+ # Tolerance untuk menggabungkan koordinat yang sangat dekat (dalam meter)
+ tolerance = 100.0 # 100 meter tolerance
+
+ def get_or_create_node(coord):
+ """Dapatkan node ID untuk koordinat, atau buat baru jika belum ada"""
+ nonlocal node_counter
+
+ # Cari node yang sudah ada dalam tolerance
+ for existing_coord, node_id in coord_to_node.items():
+ if (
+ abs(existing_coord[0] - coord[0]) < tolerance
+ and abs(existing_coord[1] - coord[1]) < tolerance
+ ):
+ return node_id
+
+ # Buat node baru
+ coord_to_node[coord] = node_counter
+ node_counter += 1
+ return node_counter - 1
+
+ for idx, row in gdf_utm.iterrows():
+ geom = row.geometry
+ line_name = row.get("nama", f"Line_{idx}")
+ line_id = row.get("id", idx)
+
+ # Handle MultiLineString dan LineString
+ if geom.geom_type == "MultiLineString":
+ # Pecah MultiLineString menjadi LineString individual
+ for i, line in enumerate(geom.geoms):
+ coords = list(line.coords)
+ if len(coords) >= 2:
+ # Untuk setiap segmen dalam line, buat edges berturut-turut
+ for j in range(len(coords) - 1):
+ start_point = coords[j]
+ end_point = coords[j + 1]
+
+ # Dapatkan atau buat node
+ start_idx = get_or_create_node(start_point)
+ end_idx = get_or_create_node(end_point)
+
+ # Hitung panjang segmen
+ segment_length = (
+ (end_point[0] - start_point[0]) ** 2
+ + (end_point[1] - start_point[1]) ** 2
+ ) ** 0.5
+
+ edge_data = {
+ "weight": segment_length,
+ "line_id": f"{line_id}_{i}_{j}",
+ "nama": f"{line_name}_segment_{i}_{j}",
+ "length_m": segment_length,
+ "length_km": segment_length / 1000,
+ }
+
+ edges.append((start_idx, end_idx, edge_data))
+
+ # Buat geometri LineString untuk segmen ini
+ segment_geom = LineString([start_point, end_point])
+ line_segments.append(
+ {
+ "geometry": segment_geom,
+ "start_node": start_idx,
+ "end_node": end_idx,
+ "nama": f"{line_name}_segment_{i}_{j}",
+ "length_m": segment_length,
+ "length_km": segment_length / 1000,
+ }
+ )
+
+ elif geom.geom_type == "LineString":
+ coords = list(geom.coords)
if len(coords) >= 2:
- # Untuk setiap segmen dalam line, buat edges berturut-turut
+ # Untuk LineString, buat edges berturut-turut untuk setiap segmen
for j in range(len(coords) - 1):
start_point = coords[j]
end_point = coords[j + 1]
@@ -176,8 +263,8 @@ def create_network_graph(gdf):
edge_data = {
"weight": segment_length,
- "line_id": f"{line_id}_{i}_{j}",
- "nama": f"{line_name}_segment_{i}_{j}",
+ "line_id": f"{line_id}_{j}",
+ "nama": f"{line_name}_segment_{j}",
"length_m": segment_length,
"length_km": segment_length / 1000,
}
@@ -191,86 +278,61 @@ def create_network_graph(gdf):
"geometry": segment_geom,
"start_node": start_idx,
"end_node": end_idx,
- "nama": f"{line_name}_segment_{i}_{j}",
+ "nama": f"{line_name}_segment_{j}",
"length_m": segment_length,
"length_km": segment_length / 1000,
}
)
- elif geom.geom_type == "LineString":
- coords = list(geom.coords)
- if len(coords) >= 2:
- # Untuk LineString, buat edges berturut-turut untuk setiap segmen
- for j in range(len(coords) - 1):
- start_point = coords[j]
- end_point = coords[j + 1]
-
- # Dapatkan atau buat node
- start_idx = get_or_create_node(start_point)
- end_idx = get_or_create_node(end_point)
-
- # Hitung panjang segmen
- segment_length = (
- (end_point[0] - start_point[0]) ** 2
- + (end_point[1] - start_point[1]) ** 2
- ) ** 0.5
-
- edge_data = {
- "weight": segment_length,
- "line_id": f"{line_id}_{j}",
- "nama": f"{line_name}_segment_{j}",
- "length_m": segment_length,
- "length_km": segment_length / 1000,
- }
-
- edges.append((start_idx, end_idx, edge_data))
-
- # Buat geometri LineString untuk segmen ini
- segment_geom = LineString([start_point, end_point])
- line_segments.append(
- {
- "geometry": segment_geom,
- "start_node": start_idx,
- "end_node": end_idx,
- "nama": f"{line_name}_segment_{j}",
- "length_m": segment_length,
- "length_km": segment_length / 1000,
- }
- )
+ # Tambahkan nodes ke graf dengan informasi posisi
+ for coord, node_id in coord_to_node.items():
+ node_data = {
+ "pos": coord,
+ "type": "junction",
+ "x": coord[0], # UTM Easting
+ "y": coord[1], # UTM Northing
+ "lat": None, # Will be calculated when needed
+ "lon": None, # Will be calculated when needed
+ }
+ G.add_node(node_id, **node_data)
- # Tambahkan nodes ke graf dengan informasi posisi
- for coord, node_id in coord_to_node.items():
- node_data = {
- "pos": coord,
- "type": "junction",
- "x": coord[0], # UTM Easting
- "y": coord[1], # UTM Northing
- "lat": None, # Will be calculated when needed
- "lon": None, # Will be calculated when needed
- }
- G.add_node(node_id, **node_data)
+ # Tambahkan edges ke graf
+ G.add_edges_from(edges)
- # Tambahkan edges ke graf
- G.add_edges_from(edges)
+ # Konversi coord_to_node menjadi list nodes untuk kompatibilitas
+ nodes = [None] * len(coord_to_node)
+ for coord, node_id in coord_to_node.items():
+ nodes[node_id] = coord
- # Konversi coord_to_node menjadi list nodes untuk kompatibilitas
- nodes = [None] * len(coord_to_node)
- for coord, node_id in coord_to_node.items():
- nodes[node_id] = coord
+ return G, nodes, gdf_utm, line_segments
- return G, nodes, gdf_utm, line_segments
+ except Exception as e:
+ st.error(f"Error creating network graph: {str(e)}")
+ return nx.Graph(), [], gdf, []
def calculate_centrality_measures(G):
- """Hitung berbagai ukuran sentralitas"""
+ """Hitung berbagai ukuran sentralitas dengan error handling"""
centrality_measures = {}
try:
+ if G.number_of_nodes() == 0:
+ return {"degree": {}, "betweenness": {}, "closeness": {}, "eigenvector": {}}
+
centrality_measures["degree"] = nx.degree_centrality(G)
centrality_measures["betweenness"] = nx.betweenness_centrality(G)
centrality_measures["closeness"] = nx.closeness_centrality(G)
- centrality_measures["eigenvector"] = nx.eigenvector_centrality(G, max_iter=1000)
- except:
+
+ try:
+ centrality_measures["eigenvector"] = nx.eigenvector_centrality(
+ G, max_iter=1000
+ )
+        except Exception:
+ # Jika eigenvector centrality gagal, gunakan nilai default
+ centrality_measures["eigenvector"] = {node: 0.0 for node in G.nodes()}
+
+ except Exception as e:
+ st.warning(f"Error calculating centrality measures: {str(e)}")
# Jika ada masalah dalam perhitungan, gunakan nilai default
num_nodes = G.number_of_nodes()
for measure in ["degree", "betweenness", "closeness", "eigenvector"]:
@@ -280,345 +342,423 @@ def calculate_centrality_measures(G):
def find_minimum_spanning_tree(G):
- """Cari Minimum Spanning Tree menggunakan algoritma Kruskal"""
- if G.number_of_nodes() == 0:
- return nx.Graph()
+ """Cari Minimum Spanning Tree menggunakan algoritma Kruskal dengan error handling"""
+ try:
+ if G.number_of_nodes() == 0:
+ return nx.Graph()
- mst = nx.minimum_spanning_tree(G, weight="weight", algorithm="kruskal")
- return mst
+ mst = nx.minimum_spanning_tree(G, weight="weight", algorithm="kruskal")
+ return mst
+ except Exception as e:
+ st.warning(f"Error finding MST: {str(e)}")
+ return nx.Graph()
def analyze_network_connectivity(G, line_segments=None):
- """Analisis konektivitas jaringan dengan detail tambahan"""
+ """Analisis konektivitas jaringan dengan detail tambahan dan error handling"""
analysis = {}
- analysis["num_nodes"] = G.number_of_nodes()
- analysis["num_edges"] = G.number_of_edges()
- analysis["is_connected"] = nx.is_connected(G)
- analysis["num_components"] = nx.number_connected_components(G)
+ try:
+ analysis["num_nodes"] = G.number_of_nodes()
+ analysis["num_edges"] = G.number_of_edges()
+ analysis["is_connected"] = (
+ nx.is_connected(G) if G.number_of_nodes() > 0 else False
+ )
+ analysis["num_components"] = nx.number_connected_components(G)
+
+ if G.number_of_nodes() > 0:
+ analysis["density"] = nx.density(G)
+
+ if nx.is_connected(G):
+ try:
+ analysis["diameter"] = nx.diameter(G)
+ analysis["average_path_length"] = nx.average_shortest_path_length(G)
+                except Exception:
+ analysis["diameter"] = "N/A (Error computing)"
+ analysis["average_path_length"] = "N/A (Error computing)"
+ else:
+ analysis["diameter"] = "N/A (Graf tidak terhubung)"
+ analysis["average_path_length"] = "N/A (Graf tidak terhubung)"
- if G.number_of_nodes() > 0:
- analysis["density"] = nx.density(G)
+ # Degree statistics
+ degrees = [d for n, d in G.degree()]
+ analysis["avg_degree"] = np.mean(degrees) if degrees else 0
+ analysis["max_degree"] = max(degrees) if degrees else 0
+ analysis["min_degree"] = min(degrees) if degrees else 0
+
+ # Network length statistics (dari line_segments)
+ if line_segments:
+ total_length_m = sum(seg["length_m"] for seg in line_segments)
+ total_length_km = total_length_m / 1000
+ avg_segment_length = (
+ total_length_m / len(line_segments) if line_segments else 0
+ )
- if nx.is_connected(G):
- analysis["diameter"] = nx.diameter(G)
- analysis["average_path_length"] = nx.average_shortest_path_length(G)
+ analysis["total_network_length_m"] = total_length_m
+ analysis["total_network_length_km"] = total_length_km
+ analysis["avg_segment_length_m"] = avg_segment_length
+ analysis["avg_segment_length_km"] = avg_segment_length / 1000
+ analysis["longest_segment_km"] = (
+ max(seg["length_km"] for seg in line_segments)
+ if line_segments
+ else 0
+ )
+ analysis["shortest_segment_km"] = (
+ min(seg["length_km"] for seg in line_segments)
+ if line_segments
+ else 0
+ )
else:
- analysis["diameter"] = "N/A (Graf tidak terhubung)"
- analysis["average_path_length"] = "N/A (Graf tidak terhubung)"
-
- # Degree statistics
- degrees = [d for n, d in G.degree()]
- analysis["avg_degree"] = np.mean(degrees) if degrees else 0
- analysis["max_degree"] = max(degrees) if degrees else 0
- analysis["min_degree"] = min(degrees) if degrees else 0
-
- # Network length statistics (dari line_segments)
- if line_segments:
- total_length_m = sum(seg["length_m"] for seg in line_segments)
- total_length_km = total_length_m / 1000
- avg_segment_length = (
- total_length_m / len(line_segments) if line_segments else 0
- )
+ # Default values for empty graph
+ for key in ["density", "avg_degree", "max_degree", "min_degree"]:
+ analysis[key] = 0
+ analysis["diameter"] = "N/A"
+ analysis["average_path_length"] = "N/A"
- analysis["total_network_length_m"] = total_length_m
- analysis["total_network_length_km"] = total_length_km
- analysis["avg_segment_length_m"] = avg_segment_length
- analysis["avg_segment_length_km"] = avg_segment_length / 1000
- analysis["longest_segment_km"] = (
- max(seg["length_km"] for seg in line_segments) if line_segments else 0
- )
- analysis["shortest_segment_km"] = (
- min(seg["length_km"] for seg in line_segments) if line_segments else 0
- )
+ except Exception as e:
+ st.error(f"Error analyzing network connectivity: {str(e)}")
+ # Return minimal analysis
+ analysis = {
+ "num_nodes": 0,
+ "num_edges": 0,
+ "is_connected": False,
+ "num_components": 0,
+ "density": 0,
+ "diameter": "N/A",
+ "average_path_length": "N/A",
+ "avg_degree": 0,
+ "max_degree": 0,
+ "min_degree": 0,
+ }
return analysis
def create_network_visualization(G, nodes, centrality_measures, show_labels=False):
- """Buat visualisasi jaringan menggunakan Plotly dengan layout yang lebih baik"""
- if G.number_of_nodes() == 0:
- return go.Figure()
-
- # Gunakan posisi asli dari koordinat UTM, kemudian normalisasi untuk visualisasi
- pos = {}
- node_coords = [(G.nodes[node]["x"], G.nodes[node]["y"]) for node in G.nodes()]
+ """Buat visualisasi jaringan menggunakan Plotly dengan error handling"""
+ try:
+ if G.number_of_nodes() == 0:
+ fig = go.Figure()
+ fig.add_annotation(
+ x=0.5,
+ y=0.5,
+ text="Tidak ada data untuk divisualisasikan",
+ showarrow=False,
+ font=dict(size=16),
+ )
+ return fig
+
+ # Gunakan posisi asli dari koordinat UTM, kemudian normalisasi untuk visualisasi
+ pos = {}
+ node_coords = [(G.nodes[node]["x"], G.nodes[node]["y"]) for node in G.nodes()]
+
+ if node_coords:
+ # Normalisasi koordinat untuk visualisasi yang lebih baik
+ min_x = min(coord[0] for coord in node_coords)
+ max_x = max(coord[0] for coord in node_coords)
+ min_y = min(coord[1] for coord in node_coords)
+ max_y = max(coord[1] for coord in node_coords)
+
+ # Avoid division by zero
+ range_x = max_x - min_x if max_x != min_x else 1
+ range_y = max_y - min_y if max_y != min_y else 1
+
+ for node in G.nodes():
+ x_norm = (G.nodes[node]["x"] - min_x) / range_x
+ y_norm = (G.nodes[node]["y"] - min_y) / range_y
+ pos[node] = (x_norm, y_norm)
+ else:
+ # Fallback ke spring layout jika tidak ada koordinat
+ pos = nx.spring_layout(G, k=1, iterations=50)
+
+ # Siapkan data untuk edges
+ edge_x = []
+ edge_y = []
+ edge_info = []
+
+ for edge in G.edges(data=True):
+ if edge[0] in pos and edge[1] in pos:
+ x0, y0 = pos[edge[0]]
+ x1, y1 = pos[edge[1]]
+ edge_x.extend([x0, x1, None])
+ edge_y.extend([y0, y1, None])
+
+ weight = edge[2].get("weight", 0)
+ edge_info.append(f"Weight: {weight:.2f}m")
+
+ # Trace untuk edges dengan styling yang lebih baik
+ edge_trace = go.Scatter(
+ x=edge_x,
+ y=edge_y,
+ line=dict(width=0.8, color="rgba(125,125,125,0.8)"),
+ hoverinfo="none",
+ mode="lines",
+ name="Saluran Listrik",
+ )
- if node_coords:
- # Normalisasi koordinat untuk visualisasi yang lebih baik
- min_x = min(coord[0] for coord in node_coords)
- max_x = max(coord[0] for coord in node_coords)
- min_y = min(coord[1] for coord in node_coords)
- max_y = max(coord[1] for coord in node_coords)
+ # Siapkan data untuk nodes
+ node_x = []
+ node_y = []
+ node_text = []
+ node_color = []
+ node_size = []
- # Avoid division by zero
- range_x = max_x - min_x if max_x != min_x else 1
- range_y = max_y - min_y if max_y != min_y else 1
+ # Gunakan degree centrality untuk pewarnaan dan ukuran
+ degree_cent = centrality_measures.get("degree", {})
for node in G.nodes():
- x_norm = (G.nodes[node]["x"] - min_x) / range_x
- y_norm = (G.nodes[node]["y"] - min_y) / range_y
- pos[node] = (x_norm, y_norm)
- else:
- # Fallback ke spring layout jika tidak ada koordinat
- pos = nx.spring_layout(G, k=1, iterations=50)
-
- # Siapkan data untuk edges
- edge_x = []
- edge_y = []
- edge_info = []
-
- for edge in G.edges(data=True):
- if edge[0] in pos and edge[1] in pos:
- x0, y0 = pos[edge[0]]
- x1, y1 = pos[edge[1]]
- edge_x.extend([x0, x1, None])
- edge_y.extend([y0, y1, None])
-
- weight = edge[2].get("weight", 0)
- edge_info.append(f"Weight: {weight:.2f}m")
-
- # Trace untuk edges dengan styling yang lebih baik
- edge_trace = go.Scatter(
- x=edge_x,
- y=edge_y,
- line=dict(width=0.8, color="rgba(125,125,125,0.8)"),
- hoverinfo="none",
- mode="lines",
- name="Saluran Listrik",
- )
-
- # Siapkan data untuk nodes
- node_x = []
- node_y = []
- node_text = []
- node_color = []
- node_size = []
-
- # Gunakan degree centrality untuk pewarnaan dan ukuran
- degree_cent = centrality_measures["degree"]
-
- for node in G.nodes():
- if node in pos:
- x, y = pos[node]
- node_x.append(x)
- node_y.append(y)
-
- # Informasi node
- adjacencies = list(G.neighbors(node))
-            node_info = f"Node: {node}<br>"
-            node_info += f"Koneksi: {len(adjacencies)}<br>"
-            node_info += f"Degree Centrality: {degree_cent[node]:.3f}<br>"
-            node_info += (
-                f'Betweenness: {centrality_measures["betweenness"][node]:.3f}<br>'
-            )
- node_info += f'Closeness: {centrality_measures["closeness"][node]:.3f}'
-
- node_text.append(node_info)
- node_color.append(degree_cent[node])
-
- # Ukuran node berdasarkan degree centrality
- base_size = 8
- size_multiplier = 20
- node_size.append(base_size + degree_cent[node] * size_multiplier)
-
- # Trace untuk nodes dengan styling yang lebih menarik
- node_trace = go.Scatter(
- x=node_x,
- y=node_y,
- mode="markers+text" if show_labels else "markers",
- hoverinfo="text",
- text=[str(i) for i in range(len(node_x))] if show_labels else [],
- textposition="middle center",
- textfont=dict(size=8, color="white"),
- hovertext=node_text,
- marker=dict(
- showscale=True,
- colorscale="Viridis",
- reversescale=True,
- color=node_color,
- size=node_size,
- colorbar=dict(
- thickness=15,
- len=0.7,
- x=1.02,
- title=dict(text="Degree Centrality", font=dict(size=12)),
- tickfont=dict(size=10),
+ if node in pos:
+ x, y = pos[node]
+ node_x.append(x)
+ node_y.append(y)
+
+ # Informasi node
+ adjacencies = list(G.neighbors(node))
+                    node_info = f"Node: {node}<br>"
+                    node_info += f"Koneksi: {len(adjacencies)}<br>"
+                    node_info += f"Degree Centrality: {degree_cent.get(node, 0):.3f}<br>"
+                    node_info += f'Betweenness: {centrality_measures.get("betweenness", {}).get(node, 0):.3f}<br>'
+ node_info += f'Closeness: {centrality_measures.get("closeness", {}).get(node, 0):.3f}'
+
+ node_text.append(node_info)
+ node_color.append(degree_cent.get(node, 0))
+
+ # Ukuran node berdasarkan degree centrality
+ base_size = 8
+ size_multiplier = 20
+ node_size.append(base_size + degree_cent.get(node, 0) * size_multiplier)
+
+ # Trace untuk nodes dengan styling yang lebih menarik
+ node_trace = go.Scatter(
+ x=node_x,
+ y=node_y,
+ mode="markers+text" if show_labels else "markers",
+ hoverinfo="text",
+ text=[str(i) for i in range(len(node_x))] if show_labels else [],
+ textposition="middle center",
+ textfont=dict(size=8, color="white"),
+ hovertext=node_text,
+ marker=dict(
+ showscale=True,
+ colorscale="Viridis",
+ reversescale=True,
+ color=node_color,
+ size=node_size,
+ colorbar=dict(
+ thickness=15,
+ len=0.7,
+ x=1.02,
+ title=dict(text="Degree Centrality", font=dict(size=12)),
+ tickfont=dict(size=10),
+ ),
+ line=dict(width=1, color="white"),
+ opacity=0.9,
),
- line=dict(width=1, color="white"),
- opacity=0.9,
- ),
- name="Node/Junction",
- )
+ name="Node/Junction",
+ )
- # Buat figure
- fig = go.Figure(
- data=[edge_trace, node_trace],
- layout=go.Layout(
- title=dict(
- text="Visualisasi Graf Jaringan Listrik DIY", font=dict(size=16), x=0.5
- ),
- showlegend=False,
- hovermode="closest",
- margin=dict(b=40, l=40, r=60, t=80),
- annotations=[
- dict(
-                    text="Node berukuran dan berwarna berdasarkan Degree Centrality.<br>Node yang lebih besar dan gelap = lebih penting dalam jaringan",
- showarrow=False,
- xref="paper",
- yref="paper",
- x=0.02,
- y=0.02,
- xanchor="left",
- yanchor="bottom",
- font=dict(color="#666", size=10),
- bgcolor="rgba(255,255,255,0.8)",
- bordercolor="#ccc",
- borderwidth=1,
- )
- ],
- xaxis=dict(
- showgrid=True,
- zeroline=False,
- showticklabels=False,
- gridcolor="rgba(128,128,128,0.2)",
- ),
- yaxis=dict(
- showgrid=True,
- zeroline=False,
- showticklabels=False,
- gridcolor="rgba(128,128,128,0.2)",
+ # Buat figure
+ fig = go.Figure(
+ data=[edge_trace, node_trace],
+ layout=go.Layout(
+ title=dict(
+ text="Visualisasi Graf Jaringan Listrik DIY",
+ font=dict(size=16),
+ x=0.5,
+ ),
+ showlegend=False,
+ hovermode="closest",
+ margin=dict(b=40, l=40, r=60, t=80),
+ annotations=[
+ dict(
+                            text="Node berukuran dan berwarna berdasarkan Degree Centrality.<br>Node yang lebih besar dan gelap = lebih penting dalam jaringan",
+ showarrow=False,
+ xref="paper",
+ yref="paper",
+ x=0.02,
+ y=0.02,
+ xanchor="left",
+ yanchor="bottom",
+ font=dict(color="#666", size=10),
+ bgcolor="rgba(255,255,255,0.8)",
+ bordercolor="#ccc",
+ borderwidth=1,
+ )
+ ],
+ xaxis=dict(
+ showgrid=True,
+ zeroline=False,
+ showticklabels=False,
+ gridcolor="rgba(128,128,128,0.2)",
+ ),
+ yaxis=dict(
+ showgrid=True,
+ zeroline=False,
+ showticklabels=False,
+ gridcolor="rgba(128,128,128,0.2)",
+ ),
+ plot_bgcolor="rgba(240,240,240,0.1)",
+ height=700,
),
- plot_bgcolor="rgba(240,240,240,0.1)",
- height=700,
- ),
- )
+ )
- return fig
+ return fig
+
+ except Exception as e:
+ st.error(f"Error creating network visualization: {str(e)}")
+ fig = go.Figure()
+ fig.add_annotation(
+ x=0.5,
+ y=0.5,
+ text=f"Error dalam visualisasi: {str(e)}",
+ showarrow=False,
+ font=dict(size=14),
+ )
+ return fig
def create_centrality_comparison(centrality_measures):
- """Buat perbandingan ukuran sentralitas"""
- if not centrality_measures or not centrality_measures["degree"]:
- return go.Figure()
+ """Buat perbandingan ukuran sentralitas dengan error handling"""
+ try:
+ if not centrality_measures or not centrality_measures.get("degree"):
+ fig = go.Figure()
+ fig.add_annotation(
+ x=0.5,
+ y=0.5,
+ text="Tidak ada data sentralitas untuk dibandingkan",
+ showarrow=False,
+ font=dict(size=16),
+ )
+ return fig
+
+ nodes = list(centrality_measures["degree"].keys())
+
+ fig = make_subplots(
+ rows=2,
+ cols=2,
+ subplot_titles=(
+ "Degree Centrality",
+ "Betweenness Centrality",
+ "Closeness Centrality",
+ "Eigenvector Centrality",
+ ),
+ vertical_spacing=0.1,
+ )
- nodes = list(centrality_measures["degree"].keys())
-
- fig = make_subplots(
- rows=2,
- cols=2,
- subplot_titles=(
- "Degree Centrality",
- "Betweenness Centrality",
- "Closeness Centrality",
- "Eigenvector Centrality",
- ),
- vertical_spacing=0.1,
- )
+ measures = ["degree", "betweenness", "closeness", "eigenvector"]
+ positions = [(1, 1), (1, 2), (2, 1), (2, 2)]
- measures = ["degree", "betweenness", "closeness", "eigenvector"]
- positions = [(1, 1), (1, 2), (2, 1), (2, 2)]
+ for measure, (row, col) in zip(measures, positions):
+ values = [centrality_measures[measure].get(node, 0) for node in nodes]
- for measure, (row, col) in zip(measures, positions):
- values = [centrality_measures[measure][node] for node in nodes]
+ fig.add_trace(
+ go.Bar(x=nodes, y=values, name=measure.title()), row=row, col=col
+ )
- fig.add_trace(go.Bar(x=nodes, y=values, name=measure.title()), row=row, col=col)
+ fig.update_layout(
+ height=600,
+ showlegend=False,
+ title=dict(text="Perbandingan Ukuran Sentralitas", font=dict(size=16)),
+ )
+ return fig
- fig.update_layout(
- height=600,
- showlegend=False,
- title=dict(text="Perbandingan Ukuran Sentralitas", font=dict(size=16)),
- )
- return fig
+ except Exception as e:
+ st.error(f"Error creating centrality comparison: {str(e)}")
+ return go.Figure()
def create_map_visualization(gdf_original):
- """Buat visualisasi peta menggunakan Folium dengan penanganan MultiLineString"""
- if gdf_original is None or gdf_original.empty:
- return None
+ """Buat visualisasi peta menggunakan Folium dengan error handling"""
+ try:
+ if gdf_original is None or gdf_original.empty:
+ return None
- # Konversi ke WGS84 untuk visualisasi
- gdf_wgs84 = gdf_original.to_crs("EPSG:4326")
+ # Konversi ke WGS84 untuk visualisasi
+ gdf_wgs84 = gdf_original.to_crs("EPSG:4326")
- # Hitung centroid untuk center map
- bounds = gdf_wgs84.total_bounds
- center_lat = (bounds[1] + bounds[3]) / 2
- center_lon = (bounds[0] + bounds[2]) / 2
+ # Hitung centroid untuk center map
+ bounds = gdf_wgs84.total_bounds
+ center_lat = (bounds[1] + bounds[3]) / 2
+ center_lon = (bounds[0] + bounds[2]) / 2
- # Buat peta
- m = folium.Map(
- location=[center_lat, center_lon], zoom_start=12, tiles="OpenStreetMap"
- )
+ # Buat peta
+ m = folium.Map(
+ location=[center_lat, center_lon], zoom_start=12, tiles="OpenStreetMap"
+ )
+
+ # Tambahkan layer jaringan listrik
+ for idx, row in gdf_wgs84.iterrows():
+ geom = row.geometry
+ line_name = row.get("nama", f"Line_{idx}")
+ line_id = row.get("id", idx)
+
+ # Handle MultiLineString dan LineString
+ if geom.geom_type == "MultiLineString":
+ for i, line in enumerate(geom.geoms):
+ coords = [[lat, lon] for lon, lat in line.coords]
+
+ # Hitung panjang untuk popup
+ line_length_m = line.length * 111000 # Approximate conversion
+ line_length_km = line_length_m / 1000
+
+ popup_text = f"""
+ {line_name} - Segment {i+1}
+ ID: {line_id}
+ Panjang: {line_length_km:.3f} km
+ Tipe: MultiLineString
+ """
- # Tambahkan layer jaringan listrik
- for idx, row in gdf_wgs84.iterrows():
- geom = row.geometry
- line_name = row.get("nama", f"Line_{idx}")
- line_id = row.get("id", idx)
+ folium.PolyLine(
+ locations=coords,
+ color="red",
+ weight=2,
+ opacity=0.8,
+ popup=folium.Popup(popup_text, max_width=300),
+ ).add_to(m)
- # Handle MultiLineString dan LineString
- if geom.geom_type == "MultiLineString":
- for i, line in enumerate(geom.geoms):
- coords = [[lat, lon] for lon, lat in line.coords]
+ elif geom.geom_type == "LineString":
+ coords = [[lat, lon] for lon, lat in geom.coords]
# Hitung panjang untuk popup
- line_length_m = line.length * 111000 # Approximate conversion
+ line_length_m = geom.length * 111000 # Approximate conversion
line_length_km = line_length_m / 1000
popup_text = f"""
- {line_name} - Segment {i+1}
+ {line_name}
ID: {line_id}
Panjang: {line_length_km:.3f} km
- Tipe: MultiLineString
+ Tipe: LineString
"""
folium.PolyLine(
locations=coords,
- color="red",
+ color="blue",
weight=2,
opacity=0.8,
popup=folium.Popup(popup_text, max_width=300),
).add_to(m)
- elif geom.geom_type == "LineString":
- coords = [[lat, lon] for lon, lat in geom.coords]
-
- # Hitung panjang untuk popup
- line_length_m = geom.length * 111000 # Approximate conversion
- line_length_km = line_length_m / 1000
-
- popup_text = f"""
- {line_name}
- ID: {line_id}
- Panjang: {line_length_km:.3f} km
- Tipe: LineString
- """
+ # Tambahkan legend
+ legend_html = """
+
+
Legenda
+
MultiLineString
+
LineString
+
+ """
+ m.get_root().html.add_child(folium.Element(legend_html))
- folium.PolyLine(
- locations=coords,
- color="blue",
- weight=2,
- opacity=0.8,
- popup=folium.Popup(popup_text, max_width=300),
- ).add_to(m)
-
- # Tambahkan legend
- legend_html = """
-
-
Legenda
-
MultiLineString
-
LineString
-
- """
- m.get_root().html.add_child(folium.Element(legend_html))
+ return m
- return m
+ except Exception as e:
+ st.error(f"Error creating map visualization: {str(e)}")
+ return None
-# Main App
+# === MAIN APPLICATION ===
def main():
st.markdown(
        '<h1 class="main-header">⚡ Analisis Keterhubungan Jaringan Listrik DIY</h1>',
@@ -636,6 +776,15 @@ def main():
# Sidebar
st.sidebar.title("🔧 Konfigurasi Analisis")
+ # Add environment info for debugging
+ with st.sidebar.expander("🔍 Debug Info"):
+ st.write("Environment Variables:")
+ st.write(f"HOME: {os.environ.get('HOME', 'Not set')}")
+ st.write(f"MPLCONFIGDIR: {os.environ.get('MPLCONFIGDIR', 'Not set')}")
+ st.write(
+ f"STREAMLIT_CONFIG_DIR: {os.environ.get('STREAMLIT_CONFIG_DIR', 'Not set')}"
+ )
+
# Pilihan sumber data
data_source = st.sidebar.radio(
"Pilih Sumber Data:",
@@ -653,7 +802,11 @@ def main():
if uploaded_file is not None:
with st.spinner("Memproses file yang diupload..."):
- gdf = process_uploaded_data(uploaded_file)
+ gdf = safe_file_processing(uploaded_file)
+ if gdf is not None:
+ st.sidebar.success(
+ f"✅ File berhasil diproses: {len(gdf)} features"
+ )
elif data_source == "💾 File Lokal":
zip_path = st.sidebar.text_input(
@@ -665,7 +818,27 @@ def main():
if zip_path and st.sidebar.button("📂 Load File Lokal"):
if os.path.exists(zip_path):
with st.spinner("Memuat file lokal..."):
- gdf = load_local_zip_data(zip_path)
+ try:
+ with zipfile.ZipFile(zip_path, "r") as zip_file:
+ with tempfile.TemporaryDirectory() as temp_dir:
+ zip_file.extractall(temp_dir)
+ shp_files = [
+ f
+ for f in os.listdir(temp_dir)
+ if f.endswith(".shp")
+ ]
+ if shp_files:
+ shp_path = os.path.join(temp_dir, shp_files[0])
+ gdf = gpd.read_file(shp_path)
+ st.sidebar.success(
+ f"✅ File lokal berhasil dimuat: {len(gdf)} features"
+ )
+ else:
+ st.sidebar.error(
+ "File shapefile tidak ditemukan dalam ZIP"
+ )
+ except Exception as e:
+ st.sidebar.error(f"Error memuat file lokal: {str(e)}")
else:
st.sidebar.error("File tidak ditemukan!")
@@ -676,29 +849,18 @@ def main():
)
if st.sidebar.button("🌐 Download dari URL"):
- with st.spinner("Mengunduh dan memproses data..."):
- # Download data (fungsi lama)
- try:
- response = requests.get(data_url, timeout=30)
- response.raise_for_status()
-
- with zipfile.ZipFile(io.BytesIO(response.content)) as zip_file:
- with tempfile.TemporaryDirectory() as temp_dir:
- zip_file.extractall(temp_dir)
- shp_files = [
- f for f in os.listdir(temp_dir) if f.endswith(".shp")
- ]
- if shp_files:
- shp_path = os.path.join(temp_dir, shp_files[0])
- gdf = gpd.read_file(shp_path)
- else:
- st.error("File shapefile tidak ditemukan")
- except Exception as e:
- st.error(f"Error mengunduh data: {str(e)}")
+ gdf = safe_url_download(data_url)
+ if gdf is not None:
+ st.sidebar.success(f"✅ Data berhasil diunduh: {len(gdf)} features")
# Konfigurasi visualisasi
show_labels = st.sidebar.checkbox("Tampilkan Label Node", value=False)
+ # Add performance settings
+ with st.sidebar.expander("⚙️ Pengaturan Performa"):
+ max_nodes_viz = st.slider("Max nodes untuk visualisasi", 50, 1000, 500)
+ use_cache = st.checkbox("Gunakan cache untuk analisis", value=True)
+
# Proses data jika sudah dimuat
if gdf is not None and not gdf.empty:
if (
@@ -706,47 +868,71 @@ def main():
or "gdf" not in st.session_state
):
with st.spinner("Memproses analisis graf..."):
- st.session_state["gdf"] = gdf
+ try:
+ st.session_state["gdf"] = gdf
+
+ # Buat graf jaringan
+ G, nodes, gdf_utm, line_segments = create_network_graph(gdf)
+
+ # Limit nodes for visualization if too many
+ if G.number_of_nodes() > max_nodes_viz:
+ st.warning(
+ f"⚠️ Graf memiliki {G.number_of_nodes()} nodes. Visualisasi dibatasi pada {max_nodes_viz} nodes untuk performa."
+ )
+ # Create subgraph with highest degree nodes for visualization
+ degrees = dict(G.degree())
+ top_nodes = sorted(
+ degrees.keys(), key=lambda x: degrees[x], reverse=True
+ )[:max_nodes_viz]
+ G_viz = G.subgraph(top_nodes).copy()
+ st.session_state["G_viz"] = G_viz
+ else:
+ st.session_state["G_viz"] = G
- # Buat graf jaringan
- G, nodes, gdf_utm, line_segments = create_network_graph(gdf)
- st.session_state["G"] = G
- st.session_state["nodes"] = nodes
- st.session_state["gdf_utm"] = gdf_utm
- st.session_state["line_segments"] = line_segments
+ st.session_state["G"] = G
+ st.session_state["nodes"] = nodes
+ st.session_state["gdf_utm"] = gdf_utm
+ st.session_state["line_segments"] = line_segments
- # Hitung ukuran sentralitas
- centrality_measures = calculate_centrality_measures(G)
- st.session_state["centrality"] = centrality_measures
+ # Hitung ukuran sentralitas
+ centrality_measures = calculate_centrality_measures(G)
+ st.session_state["centrality"] = centrality_measures
- # Analisis konektivitas
- connectivity_analysis = analyze_network_connectivity(G, line_segments)
- st.session_state["connectivity"] = connectivity_analysis
+ # Analisis konektivitas
+ connectivity_analysis = analyze_network_connectivity(
+ G, line_segments
+ )
+ st.session_state["connectivity"] = connectivity_analysis
+
+ # MST
+ mst = find_minimum_spanning_tree(G)
+ st.session_state["mst"] = mst
- # MST
- mst = find_minimum_spanning_tree(G)
- st.session_state["mst"] = mst
+ st.success("✅ Data berhasil diproses!")
- st.success("✅ Data berhasil diproses!")
+ except Exception as e:
+ st.error(f"❌ Error dalam analisis: {str(e)}")
+ st.info(
+ "Silakan coba dengan file data yang berbeda atau periksa format data."
+ )
# Tampilkan hasil jika data sudah dimuat
if "gdf" in st.session_state:
gdf = st.session_state["gdf"]
G = st.session_state["G"]
+ G_viz = st.session_state.get("G_viz", G)
centrality_measures = st.session_state["centrality"]
connectivity_analysis = st.session_state["connectivity"]
mst = st.session_state["mst"]
# Tab layout
- tab1, tab2, tab3, tab4, tab5, tab6, tab7 = st.tabs(
+ tab1, tab2, tab3, tab4, tab5 = st.tabs(
[
"📊 Overview",
"🗺️ Peta Jaringan",
"📈 Analisis Graf",
"🎯 Sentralitas",
"🌳 MST Analysis",
- "🔍 Debug Info",
- "🧮 Advanced Algorithms",
]
)
@@ -856,14 +1042,6 @@ def main():
f"- {len([s for s in component_sizes if s == 1])} node terisolasi perlu perhatian"
)
- # Analisis atribut data
- st.markdown("### 🏷️ Analisis Atribut Data")
- if "nama" in gdf.columns:
- nama_counts = gdf["nama"].value_counts()
- st.write("**Distribusi Nama Jaringan:**")
- for nama, count in nama_counts.head(10).items():
- st.write(f"- {nama}: {count} segmen")
-
# Tampilkan sample data
st.markdown("### 📄 Sample Data")
# Tampilkan kolom yang relevan
@@ -881,11 +1059,14 @@ def main():
)
# Buat peta
- map_viz = create_map_visualization(gdf)
- if map_viz:
- st_folium(map_viz, width=700, height=500)
- else:
- st.error("Tidak dapat membuat visualisasi peta")
+ try:
+ map_viz = create_map_visualization(gdf)
+ if map_viz:
+ st_folium(map_viz, width=700, height=500)
+ else:
+ st.error("Tidak dapat membuat visualisasi peta")
+ except Exception as e:
+ st.error(f"Error creating map: {str(e)}")
with tab3:
st.markdown(
@@ -893,11 +1074,20 @@ def main():
unsafe_allow_html=True,
)
+ # Performance warning
+ if G.number_of_nodes() > max_nodes_viz:
+ st.info(
+ f"ℹ️ Menampilkan {max_nodes_viz} node dengan degree tertinggi dari total {G.number_of_nodes()} nodes"
+ )
+
# Visualisasi graf
- network_fig = create_network_visualization(
- G, st.session_state["nodes"], centrality_measures, show_labels
- )
- st.plotly_chart(network_fig, use_container_width=True)
+ try:
+ network_fig = create_network_visualization(
+ G_viz, st.session_state["nodes"], centrality_measures, show_labels
+ )
+ st.plotly_chart(network_fig, use_container_width=True)
+ except Exception as e:
+ st.error(f"Error creating network visualization: {str(e)}")
# Informasi graf
st.markdown("### 🔍 Interpretasi Graf")
@@ -917,13 +1107,16 @@ def main():
)
# Perbandingan sentralitas
- centrality_fig = create_centrality_comparison(centrality_measures)
- st.plotly_chart(centrality_fig, use_container_width=True)
+ try:
+ centrality_fig = create_centrality_comparison(centrality_measures)
+ st.plotly_chart(centrality_fig, use_container_width=True)
+ except Exception as e:
+ st.error(f"Error creating centrality comparison: {str(e)}")
# Identifikasi node kritis
st.markdown("### 🎯 Identifikasi Node Kritis")
- if centrality_measures["degree"]:
+ if centrality_measures.get("degree"):
# Top nodes berdasarkan degree centrality
degree_sorted = sorted(
centrality_measures["degree"].items(),
@@ -938,2101 +1131,74 @@ def main():
# Rekomendasi
st.markdown("### 💡 Rekomendasi")
- st.markdown(
- f"""
- **Node Paling Kritis:** Node {top_nodes[0][0]} (Degree Centrality: {top_nodes[0][1]:.4f})
-
- **Rekomendasi Kebijakan:**
- 1. **Prioritas Pemeliharaan**: Fokuskan pemeliharaan pada node dengan centrality tinggi
- 2. **Redundansi**: Pertimbangkan jalur alternatif untuk node kritis
- 3. **Monitoring**: Pasang sistem monitoring khusus pada node dengan degree centrality > 0.1
- 4. **Investasi**: Alokasikan investasi infrastruktur pada area dengan connectivity rendah
- """
- )
-
- with tab5:
- st.markdown(
- '',
- unsafe_allow_html=True,
- )
-
- # Sub-tabs untuk MST algorithms
- mst_tab1, mst_tab2, mst_tab3 = st.tabs(
- ["🌿 Kruskal", "🔄 Boruvka", "🎯 Prim"]
- )
-
- with mst_tab1:
- st.markdown("### 🌿 Algoritma Kruskal")
- st.markdown(
- """
- **Kruskal** membangun MST dengan memilih edge dengan bobot terkecil secara greedy,
- menggunakan Union-Find untuk menghindari cycle.
- """
- )
-
- if mst.number_of_nodes() > 0:
- col1, col2 = st.columns(2)
- with col1:
- total_weight = sum(
- [data["weight"] for _, _, data in mst.edges(data=True)]
- )
- st.metric("Total Bobot Kruskal MST", f"{total_weight:.2f}m")
- st.metric("Jumlah Edge", mst.number_of_edges())
- with col2:
- original_weight = sum(
- [data["weight"] for _, _, data in G.edges(data=True)]
- )
- efficiency = (
- ((original_weight - total_weight) / original_weight * 100)
- if original_weight > 0
- else 0
- )
- st.metric("Efisiensi", f"{efficiency:.2f}%")
- st.metric(
- "Penghematan", f"{original_weight - total_weight:.2f}m"
- )
-
- # Visualisasi Kruskal MST
- mst_centrality = calculate_centrality_measures(mst)
- mst_fig_kruskal = create_network_visualization(
- mst, st.session_state["nodes"], mst_centrality, show_labels
- )
- mst_fig_kruskal.update_layout(
- title=dict(
- text="Kruskal MST - Minimum Spanning Tree",
- font=dict(size=16),
- )
- )
- st.plotly_chart(mst_fig_kruskal, use_container_width=True)
-
- st.markdown("### 🔧 Interpretasi Kruskal MST")
+ if top_nodes:
st.markdown(
- """
- - Edge dipilih berdasarkan bobot terkecil
- - Menghindari cycle menggunakan Union-Find
- - Optimal untuk graf sparse dengan edge weights yang bervariasi
+ f"""
+ **Node Paling Kritis:** Node {top_nodes[0][0]} (Degree Centrality: {top_nodes[0][1]:.4f})
+
+ **Rekomendasi Kebijakan:**
+ 1. **Prioritas Pemeliharaan**: Fokuskan pemeliharaan pada node dengan centrality tinggi
+ 2. **Redundansi**: Pertimbangkan jalur alternatif untuk node kritis
+ 3. **Monitoring**: Pasang sistem monitoring khusus pada node dengan degree centrality > 0.1
+ 4. **Investasi**: Alokasikan investasi infrastruktur pada area dengan connectivity rendah
"""
)
- with mst_tab2:
- st.markdown("### 🔄 Algoritma Boruvka")
- st.markdown(
- """
- **Boruvka** membangun MST dengan cara paralel - setiap komponen secara simultan
- memilih edge minimum yang keluar dari komponen tersebut.
- """
- )
-
- if G.number_of_edges() > 0:
- try:
- boruvka_mst = nx.minimum_spanning_tree(
- G, algorithm="boruvka", weight="weight"
- )
-
- col1, col2 = st.columns(2)
- with col1:
- boruvka_weight = sum(
- [
- data["weight"]
- for _, _, data in boruvka_mst.edges(data=True)
- ]
- )
- st.metric(
- "Total Bobot Boruvka MST", f"{boruvka_weight:.2f}m"
- )
- st.metric("Jumlah Edge", boruvka_mst.number_of_edges())
- with col2:
- if "total_weight" in locals():
- diff = abs(boruvka_weight - total_weight)
- st.metric("Selisih dengan Kruskal", f"{diff:.2f}m")
- same_result = (
- "✅ Identik" if diff < 0.01 else "❌ Berbeda"
- )
- st.metric("Hasil vs Kruskal", same_result)
-
- # Visualisasi Boruvka MST
- boruvka_centrality = calculate_centrality_measures(boruvka_mst)
- boruvka_fig = create_network_visualization(
- boruvka_mst,
- st.session_state["nodes"],
- boruvka_centrality,
- show_labels,
- )
- boruvka_fig.update_layout(
- title=dict(
- text="Boruvka MST - Parallel Construction",
- font=dict(size=16),
- )
- )
- st.plotly_chart(boruvka_fig, use_container_width=True)
-
- st.markdown("### ⚡ Keunggulan Boruvka")
- st.markdown(
- """
- - Dapat diparalelkan untuk komputasi high-performance
- - Baik untuk graf dengan banyak komponen terpisah
- - Kompleksitas waktu O(E log V) seperti Kruskal
- """
- )
-
- except Exception as e:
- st.error(f"Error computing Boruvka MST: {str(e)}")
-
- with mst_tab3:
- st.markdown("### 🎯 Algoritma Prim")
- st.markdown(
- """
- **Prim** membangun MST dengan memulai dari satu node, kemudian secara greedy
- menambahkan edge dengan bobot minimum yang menghubungkan ke node baru.
- """
- )
-
- if G.number_of_edges() > 0:
- try:
- prim_mst = nx.minimum_spanning_tree(
- G, algorithm="prim", weight="weight"
- )
-
- col1, col2 = st.columns(2)
- with col1:
- prim_weight = sum(
- [
- data["weight"]
- for _, _, data in prim_mst.edges(data=True)
- ]
- )
- st.metric("Total Bobot Prim MST", f"{prim_weight:.2f}m")
- st.metric("Jumlah Edge", prim_mst.number_of_edges())
- with col2:
- if "total_weight" in locals():
- diff = abs(prim_weight - total_weight)
- st.metric("Selisih dengan Kruskal", f"{diff:.2f}m")
- same_result = (
- "✅ Identik" if diff < 0.01 else "❌ Berbeda"
- )
- st.metric("Hasil vs Kruskal", same_result)
-
- # Visualisasi Prim MST
- prim_centrality = calculate_centrality_measures(prim_mst)
- prim_fig = create_network_visualization(
- prim_mst,
- st.session_state["nodes"],
- prim_centrality,
- show_labels,
- )
- prim_fig.update_layout(
- title=dict(
- text="Prim MST - Greedy Node Expansion",
- font=dict(size=16),
- )
- )
- st.plotly_chart(prim_fig, use_container_width=True)
-
- # Perbandingan ketiga algoritma MST
- if "boruvka_weight" in locals() and "total_weight" in locals():
- st.markdown("### 📊 Perbandingan MST Algorithms")
- comparison_data = {
- "Algorithm": ["Kruskal", "Boruvka", "Prim"],
- "Total Weight (m)": [
- total_weight,
- boruvka_weight,
- prim_weight,
- ],
- "Edges Count": [
- mst.number_of_edges(),
- boruvka_mst.number_of_edges(),
- prim_mst.number_of_edges(),
- ],
- "Time Complexity": [
- "O(E log V)",
- "O(E log V)",
- "O(E log V)",
- ],
- "Space Complexity": ["O(V)", "O(V)", "O(V)"],
- }
- comparison_df = pd.DataFrame(comparison_data)
- st.dataframe(comparison_df, use_container_width=True)
-
- # Chart perbandingan
- fig_comparison = px.bar(
- comparison_df,
- x="Algorithm",
- y="Total Weight (m)",
- title="Perbandingan Total Weight MST Algorithms",
- color="Algorithm",
- )
- st.plotly_chart(fig_comparison, use_container_width=True)
-
- st.markdown("### 🌟 Karakteristik Prim")
- st.markdown(
- """
- - Mulai dari satu node dan expand secara greedy
- - Baik untuk dense graph dengan adjacency matrix
- - Mudah diimplementasi dengan priority queue
- """
- )
-
- except Exception as e:
- st.error(f"Error computing Prim MST: {str(e)}")
-
- # STEP 3: Tambahkan tab6 (Debug Info) setelah tab5:
- with tab6:
- st.markdown(
- '',
- unsafe_allow_html=True,
- )
-
- col1, col2 = st.columns(2)
-
- with col1:
- st.markdown("### 🐛 Statistik Geometri")
- geom_types = gdf.geom_type.value_counts()
- st.write("**Tipe Geometri:**")
- for geom_type, count in geom_types.items():
- st.write(f"- {geom_type}: {count}")
-
- # Analisis koordinat
- if "line_segments" in st.session_state:
- segments = st.session_state["line_segments"]
- lengths = [seg["length_m"] for seg in segments]
- st.markdown("### 📏 Distribusi Panjang Segmen")
- st.write(f"- Segmen terpendek: {min(lengths):.1f}m")
- st.write(f"- Segmen terpanjang: {max(lengths):.1f}m")
- st.write(f"- Median: {np.median(lengths):.1f}m")
- st.write(f"- Std deviation: {np.std(lengths):.1f}m")
-
- # Quartiles
- q1, q3 = np.percentile(lengths, [25, 75])
- st.write(f"- Q1 (25%): {q1:.1f}m")
- st.write(f"- Q3 (75%): {q3:.1f}m")
-
- with col2:
- st.markdown("### 🎯 Node dengan Degree Tinggi")
- high_degree = [(n, d) for n, d in G.degree() if d > 4]
- st.write(f"**Node dengan degree > 4:** {len(high_degree)}")
-
- if high_degree:
- st.write("**Top Hub Nodes:**")
- sorted_hubs = sorted(high_degree, key=lambda x: x[1], reverse=True)
- for node, degree in sorted_hubs[:10]:
- st.write(f"- Node {node}: {degree} koneksi")
-
- # Analisis komponen detail
- if connectivity_analysis["num_components"] > 1:
- st.markdown("### 🔗 Detail Setiap Komponen")
- components = list(nx.connected_components(G))
- for i, comp in enumerate(components[:10]): # Show top 10
- st.write(f"- Komponen {i+1}: {len(comp)} nodes")
- if len(comp) <= 5: # Show nodes for small components
- nodes_str = ", ".join([str(n) for n in list(comp)[:5]])
- st.write(f" Nodes: {nodes_str}")
-
- # Histogram distribusi degree
- st.markdown("### 📊 Distribusi Degree Nodes")
- degrees = [d for n, d in G.degree()]
-
- col1, col2 = st.columns(2)
- with col1:
- fig_hist = px.histogram(
- x=degrees,
- nbins=min(20, max(degrees)),
- title="Distribusi Degree dalam Jaringan",
- labels={"x": "Degree", "y": "Jumlah Node"},
- )
- st.plotly_chart(fig_hist, use_container_width=True)
-
- with col2:
- # Box plot degree distribution
- fig_box = px.box(
- y=degrees,
- title="Box Plot - Distribusi Degree",
- labels={"y": "Degree"},
- )
- st.plotly_chart(fig_box, use_container_width=True)
-
- # Network properties analysis
- st.markdown("### 🔍 Analisis Properti Jaringan")
- col1, col2, col3 = st.columns(3)
-
- with col1:
- st.markdown("**Clustering:**")
- try:
- clustering = nx.average_clustering(G)
- st.write(f"- Average clustering: {clustering:.4f}")
-
- # Local clustering untuk top nodes
- degrees = dict(G.degree())
- top_5_nodes = sorted(
- degrees.keys(), key=lambda x: degrees[x], reverse=True
- )[:5]
- st.write("- Top nodes clustering:")
- for node in top_5_nodes:
- local_clustering = nx.clustering(G, node)
- st.write(f" Node {node}: {local_clustering:.3f}")
- except:
- st.write("- Clustering: N/A")
-
- with col2:
- st.markdown("**Assortativity:**")
- try:
- degree_assortativity = nx.degree_assortativity_coefficient(G)
- st.write(f"- Degree assortativity: {degree_assortativity:.4f}")
-
- if degree_assortativity > 0:
- st.write("- Tendency: Assortative (similar nodes connect)")
- elif degree_assortativity < 0:
- st.write("- Tendency: Disassortative (different nodes connect)")
- else:
- st.write("- Tendency: Neutral")
- except:
- st.write("- Assortativity: N/A")
-
- with col3:
- st.markdown("**Efficiency:**")
- try:
- if connectivity_analysis["is_connected"]:
- global_efficiency = nx.global_efficiency(G)
- st.write(f"- Global efficiency: {global_efficiency:.4f}")
- else:
- st.write("- Global efficiency: N/A (disconnected)")
-
- local_efficiency = nx.local_efficiency(G)
- st.write(f"- Local efficiency: {local_efficiency:.4f}")
- except:
- st.write("- Efficiency: N/A")
-
- # STEP 4: Tambahkan tab7 (Advanced Algorithms) setelah tab6:
- with tab7:
+ with tab5:
st.markdown(
- '',
+ '',
unsafe_allow_html=True,
)
- # Sub-tabs untuk advanced algorithms
- adv_tab1, adv_tab2, adv_tab3, adv_tab4, adv_tab5, adv_tab6 = st.tabs(
- [
- "🛣️ Shortest Path",
- "💰 Min Cost Route",
- "🔄 Sollin",
- "🌐 Grid Analysis",
- "🚛 TSP",
- "🎲 Generative Model",
- ]
- )
-
- with adv_tab1:
- st.markdown("### 🛣️ Shortest Path Tree Analysis")
-
- if G.number_of_nodes() > 0:
- # Pilih source node
- degrees = dict(G.degree())
- source_node = max(degrees.keys(), key=lambda x: degrees[x])
-
- selected_source = st.selectbox(
- "Pilih Source Node:",
- options=list(G.nodes()),
- index=list(G.nodes()).index(source_node),
- help="Node dengan degree tertinggi dipilih sebagai default",
- )
-
- dijkstra_tab, bellman_tab = st.tabs(
- ["🎯 Dijkstra", "⚖️ Bellman-Ford"]
- )
-
- with dijkstra_tab:
- st.markdown(
- f"**Dijkstra Shortest Path dari Node {selected_source}**"
- )
-
- try:
- # Dijkstra shortest path tree
- dijkstra_lengths = nx.single_source_dijkstra_path_length(
- G, selected_source, weight="weight"
- )
- dijkstra_paths = nx.single_source_dijkstra_path(
- G, selected_source, weight="weight"
- )
-
- # Buat shortest path tree
- spt_dijkstra = nx.Graph()
- for target, path in dijkstra_paths.items():
- for i in range(len(path) - 1):
- u, v = path[i], path[i + 1]
- if G.has_edge(u, v):
- edge_data = G[u][v]
- spt_dijkstra.add_edge(u, v, **edge_data)
-
- # Copy node attributes
- for node in spt_dijkstra.nodes():
- if node in G.nodes():
- spt_dijkstra.nodes[node].update(G.nodes[node])
-
- col1, col2 = st.columns(2)
- with col1:
- avg_distance = np.mean(list(dijkstra_lengths.values()))
- st.metric("Rata-rata Jarak", f"{avg_distance:.2f}m")
- st.metric(
- "Jarak Terjauh",
- f"{max(dijkstra_lengths.values()):.2f}m",
- )
- with col2:
- st.metric("Node Terjangkau", len(dijkstra_lengths))
- coverage = (
- len(dijkstra_lengths) / G.number_of_nodes() * 100
- )
- st.metric("Coverage", f"{coverage:.1f}%")
-
- # Top 10 terjauh
- st.markdown("**Node Terjauh dari Source:**")
- farthest = sorted(
- dijkstra_lengths.items(),
- key=lambda x: x[1],
- reverse=True,
- )[:10]
- for i, (node, dist) in enumerate(farthest, 1):
- st.write(f"{i}. Node {node}: {dist:.2f}m")
-
- # Visualisasi SPT Dijkstra
- if spt_dijkstra.number_of_nodes() > 0:
- spt_centrality = calculate_centrality_measures(
- spt_dijkstra
- )
- spt_fig = create_network_visualization(
- spt_dijkstra,
- st.session_state["nodes"],
- spt_centrality,
- show_labels,
- )
- spt_fig.update_layout(
- title=dict(
- text=f"Dijkstra Shortest Path Tree (Source: {selected_source})",
- font=dict(size=16),
- )
- )
- st.plotly_chart(spt_fig, use_container_width=True)
-
- except Exception as e:
- st.error(f"Error computing Dijkstra: {str(e)}")
-
- with bellman_tab:
- st.markdown(
- f"**Bellman-Ford Shortest Path dari Node {selected_source}**"
- )
-
- try:
- # Bellman-Ford shortest path
- bf_lengths = nx.single_source_bellman_ford_path_length(
- G, selected_source, weight="weight"
- )
- bf_paths = nx.single_source_bellman_ford_path(
- G, selected_source, weight="weight"
- )
-
- # Buat shortest path tree
- spt_bellman = nx.Graph()
- for target, path in bf_paths.items():
- for i in range(len(path) - 1):
- u, v = path[i], path[i + 1]
- if G.has_edge(u, v):
- edge_data = G[u][v]
- spt_bellman.add_edge(u, v, **edge_data)
-
- # Copy node attributes
- for node in spt_bellman.nodes():
- if node in G.nodes():
- spt_bellman.nodes[node].update(G.nodes[node])
-
- col1, col2 = st.columns(2)
- with col1:
- avg_distance_bf = np.mean(list(bf_lengths.values()))
- st.metric("Rata-rata Jarak", f"{avg_distance_bf:.2f}m")
- st.metric(
- "Jarak Terjauh", f"{max(bf_lengths.values()):.2f}m"
- )
- with col2:
- # Perbandingan dengan Dijkstra
- if "avg_distance" in locals():
- diff = abs(avg_distance_bf - avg_distance)
- st.metric("Selisih dengan Dijkstra", f"{diff:.2f}m")
- st.metric("Node Terjangkau", len(bf_lengths))
-
- # Deteksi negative cycle
- st.markdown("**Negative Cycle Detection:**")
- try:
- nx.single_source_bellman_ford_path_length(
- G, selected_source, weight="weight"
- )
- st.success("✅ Tidak ada negative cycle terdeteksi")
- except nx.NetworkXUnbounded:
- st.error("❌ Negative cycle terdeteksi!")
-
- # Visualisasi SPT Bellman-Ford
- if spt_bellman.number_of_nodes() > 0:
- spt_bf_centrality = calculate_centrality_measures(
- spt_bellman
- )
- spt_bf_fig = create_network_visualization(
- spt_bellman,
- st.session_state["nodes"],
- spt_bf_centrality,
- show_labels,
- )
- spt_bf_fig.update_layout(
- title=dict(
- text=f"Bellman-Ford Shortest Path Tree (Source: {selected_source})",
- font=dict(size=16),
- )
- )
- st.plotly_chart(spt_bf_fig, use_container_width=True)
-
- except Exception as e:
- st.error(f"Error computing Bellman-Ford: {str(e)}")
-
- with adv_tab2:
- st.markdown("### 💰 Minimum Route Cost Tree")
- st.markdown(
- """
- Analisis biaya minimum untuk routing dalam jaringan listrik,
- mempertimbangkan faktor biaya operasional dan losses.
- """
- )
-
- # Cost parameters
+ if mst.number_of_nodes() > 0:
col1, col2 = st.columns(2)
with col1:
- cost_factor = st.slider(
- "Cost Factor (losses per km)", 0.01, 0.1, 0.05, 0.01
+ total_weight = sum(
+ [data["weight"] for _, _, data in mst.edges(data=True)]
)
+ st.metric("Total Bobot MST", f"{total_weight:.2f}m")
+ st.metric("Jumlah Edge", mst.number_of_edges())
with col2:
- maintenance_cost = st.slider(
- "Maintenance Cost per km", 0.001, 0.01, 0.005, 0.001
+ original_weight = sum(
+ [data["weight"] for _, _, data in G.edges(data=True)]
+ )
+ efficiency = (
+ ((original_weight - total_weight) / original_weight * 100)
+ if original_weight > 0
+ else 0
)
+ st.metric("Efisiensi", f"{efficiency:.2f}%")
+ st.metric("Penghematan", f"{original_weight - total_weight:.2f}m")
+ # Visualisasi MST
try:
- # Buat graf dengan cost sebagai weight
- cost_graph = G.copy()
- for u, v, data in cost_graph.edges(data=True):
- length_km = data["weight"] / 1000 # Convert to km
- operational_cost = length_km * cost_factor
- maintenance_cost_total = length_km * maintenance_cost
- total_cost = operational_cost + maintenance_cost_total
- cost_graph[u][v]["cost"] = total_cost
- cost_graph[u][v]["operational"] = operational_cost
- cost_graph[u][v]["maintenance"] = maintenance_cost_total
-
- # Minimum cost spanning tree
- cost_mst = nx.minimum_spanning_tree(cost_graph, weight="cost")
-
- col1, col2, col3 = st.columns(3)
- with col1:
- total_cost = sum(
- [data["cost"] for _, _, data in cost_mst.edges(data=True)]
- )
- st.metric("Total Annual Cost", f"${total_cost:.2f}")
- st.metric("Edges in Cost MST", cost_mst.number_of_edges())
- with col2:
- operational_total = sum(
- [
- data["operational"]
- for _, _, data in cost_mst.edges(data=True)
- ]
- )
- maintenance_total = sum(
- [
- data["maintenance"]
- for _, _, data in cost_mst.edges(data=True)
- ]
- )
- st.metric("Operational Cost", f"${operational_total:.2f}")
- st.metric("Maintenance Cost", f"${maintenance_total:.2f}")
- with col3:
- original_cost = sum(
- [data["cost"] for _, _, data in cost_graph.edges(data=True)]
- )
- cost_efficiency = (
- ((original_cost - total_cost) / original_cost * 100)
- if original_cost > 0
- else 0
- )
- st.metric("Cost Efficiency", f"{cost_efficiency:.2f}%")
- st.metric(
- "Annual Savings", f"${original_cost - total_cost:.2f}"
- )
-
- # Cost breakdown pie chart
- fig_cost = px.pie(
- values=[operational_total, maintenance_total],
- names=["Operational", "Maintenance"],
- title="Cost Breakdown - MST",
+ mst_centrality = calculate_centrality_measures(mst)
+ mst_fig = create_network_visualization(
+ mst, st.session_state["nodes"], mst_centrality, show_labels
)
- st.plotly_chart(fig_cost, use_container_width=True)
-
- # Visualisasi Cost MST
- if cost_mst.number_of_nodes() > 0:
- cost_centrality = calculate_centrality_measures(cost_mst)
- cost_fig = create_network_visualization(
- cost_mst,
- st.session_state["nodes"],
- cost_centrality,
- show_labels,
- )
- cost_fig.update_layout(
- title=dict(
- text="Minimum Route Cost Tree", font=dict(size=16)
- )
+ mst_fig.update_layout(
+ title=dict(
+ text="Minimum Spanning Tree - Jaringan Optimal",
+ font=dict(size=16),
)
- st.plotly_chart(cost_fig, use_container_width=True)
-
+ )
+ st.plotly_chart(mst_fig, use_container_width=True)
except Exception as e:
- st.error(f"Error computing cost tree: {str(e)}")
+ st.error(f"Error creating MST visualization: {str(e)}")
- with adv_tab3:
- st.markdown("### 🔄 Sollin Algorithm (Advanced Boruvka)")
+ st.markdown("### 🔧 Interpretasi MST")
st.markdown(
"""
- **Sollin** adalah varian Boruvka dengan implementasi yang lebih detail
- untuk menunjukkan proses step-by-step pembentukan MST.
+ - MST menunjukkan jaringan dengan biaya minimum yang tetap menghubungkan semua node
+ - Berguna untuk perencanaan infrastruktur baru atau optimasi jaringan existing
+ - Edge yang tidak termasuk dalam MST bisa dianggap sebagai redundansi
"""
)
-
- try:
- # Implementasi Sollin step-by-step simulation
- st.markdown("**Simulasi Step-by-step Sollin Algorithm:**")
-
- # Initialize dengan setiap node sebagai komponen terpisah
- components_history = []
- temp_mst = nx.Graph()
- temp_mst.add_nodes_from(G.nodes(data=True))
-
- step = 0
- max_steps = 10
-
- while (
- temp_mst.number_of_edges() < G.number_of_nodes() - 1
- and step < max_steps
- ):
- step += 1
- components = list(nx.connected_components(temp_mst))
-
- if len(components) <= 1:
- break
-
- step_info = {
- "step": step,
- "num_components": len(components),
- "component_sizes": [len(comp) for comp in components],
- "edges_added": 0,
- }
-
- # Untuk setiap komponen, cari edge minimum yang keluar
- edges_to_add = []
- for component in components:
- min_edge = None
- min_weight = float("inf")
-
- for node in component:
- for neighbor in G.neighbors(node):
- if (
- neighbor not in component
- ): # Edge keluar dari komponen
- weight = G[node][neighbor]["weight"]
- if weight < min_weight:
- min_weight = weight
- min_edge = (
- node,
- neighbor,
- G[node][neighbor],
- )
-
- if min_edge and min_edge not in edges_to_add:
- # Cek apakah edge reverse sudah ada
- reverse_edge = (min_edge[1], min_edge[0], min_edge[2])
- if reverse_edge not in edges_to_add:
- edges_to_add.append(min_edge)
-
- # Tambahkan edges ke MST
- for u, v, data in edges_to_add:
- if not temp_mst.has_edge(u, v):
- temp_mst.add_edge(u, v, **data)
- step_info["edges_added"] += 1
-
- components_history.append(step_info)
-
- if len(components) <= 1:
- break
-
- # Tampilkan history
- col1, col2 = st.columns(2)
- with col1:
- st.markdown("**Step History:**")
- for info in components_history:
- st.write(
- f"Step {info['step']}: {info['num_components']} komponen → +{info['edges_added']} edges"
- )
-
- with col2:
- if components_history:
- final_weight = sum(
- [
- data["weight"]
- for _, _, data in temp_mst.edges(data=True)
- ]
- )
- st.metric("Final MST Weight", f"{final_weight:.2f}m")
- st.metric("Total Steps", len(components_history))
- st.metric("Final Edges", temp_mst.number_of_edges())
-
- # Visualisasi progression
- if components_history:
- steps = [info["step"] for info in components_history]
- num_components = [
- info["num_components"] for info in components_history
- ]
-
- fig_progression = px.line(
- x=steps,
- y=num_components,
- title="Sollin Algorithm - Component Reduction",
- labels={"x": "Step", "y": "Number of Components"},
- markers=True,
- )
- st.plotly_chart(fig_progression, use_container_width=True)
-
- # Visualisasi final Sollin MST
- if temp_mst.number_of_nodes() > 0:
- sollin_centrality = calculate_centrality_measures(temp_mst)
- sollin_fig = create_network_visualization(
- temp_mst,
- st.session_state["nodes"],
- sollin_centrality,
- show_labels,
- )
- sollin_fig.update_layout(
- title=dict(
- text="Sollin MST - Step-by-step Result",
- font=dict(size=16),
- )
- )
- st.plotly_chart(sollin_fig, use_container_width=True)
-
- except Exception as e:
- st.error(f"Error in Sollin simulation: {str(e)}")
-
- with adv_tab4:
- st.markdown("### 🌐 Grid Analysis")
- st.markdown(
- """
- Analisis struktur grid jaringan listrik untuk identifikasi pola distribusi,
- area coverage, dan efisiensi spasial.
- """
- )
-
- try:
- # Grid analysis berdasarkan koordinat geografis
- node_coords = [
- (G.nodes[node]["x"], G.nodes[node]["y"]) for node in G.nodes()
- ]
-
- if node_coords:
- x_coords = [coord[0] for coord in node_coords]
- y_coords = [coord[1] for coord in node_coords]
-
- # Basic grid statistics
- col1, col2, col3 = st.columns(3)
- with col1:
- x_range = max(x_coords) - min(x_coords)
- st.metric("Coverage X (East-West)", f"{x_range:.0f}m")
- with col2:
- y_range = max(y_coords) - min(y_coords)
- st.metric("Coverage Y (North-South)", f"{y_range:.0f}m")
- with col3:
- grid_area = x_range * y_range / 1_000_000 # km²
- st.metric("Total Coverage Area", f"{grid_area:.2f} km²")
-
- # Advanced grid analysis
- st.markdown("### 📊 Grid Density & Distribution Analysis")
-
- col1, col2 = st.columns(2)
- with col1:
- # Node density
- density = (
- G.number_of_nodes() / grid_area if grid_area > 0 else 0
- )
- st.write(f"**Node Density:** {density:.2f} nodes/km²")
-
- # Average node spacing
- avg_spacing = (
- np.sqrt(grid_area * 1_000_000 / G.number_of_nodes())
- if G.number_of_nodes() > 0
- else 0
- )
- st.write(f"**Average Node Spacing:** {avg_spacing:.0f}m")
-
- # Edge density
- edge_density = (
- G.number_of_edges() / grid_area if grid_area > 0 else 0
- )
- st.write(f"**Edge Density:** {edge_density:.2f} edges/km²")
-
- # Network efficiency metrics
- total_length_km = connectivity_analysis.get(
- "total_network_length_km", 0
- )
- length_density = (
- total_length_km / grid_area if grid_area > 0 else 0
- )
- st.write(f"**Cable Density:** {length_density:.2f} km/km²")
-
- with col2:
- # Grid regularity analysis
- st.markdown("**Grid Regularity:**")
-
- # Calculate nearest neighbor distances
- distances = []
- for i, coord1 in enumerate(node_coords):
- min_dist = float("inf")
- for j, coord2 in enumerate(node_coords):
- if i != j:
- dist = np.sqrt(
- (coord1[0] - coord2[0]) ** 2
- + (coord1[1] - coord2[1]) ** 2
- )
- if dist < min_dist:
- min_dist = dist
- if min_dist != float("inf"):
- distances.append(min_dist)
-
- if distances:
- mean_nn_dist = np.mean(distances)
- std_nn_dist = np.std(distances)
- regularity_index = (
- std_nn_dist / mean_nn_dist if mean_nn_dist > 0 else 0
- )
-
- st.write(f"- Mean NN Distance: {mean_nn_dist:.0f}m")
- st.write(f"- Std NN Distance: {std_nn_dist:.0f}m")
- st.write(f"- Regularity Index: {regularity_index:.3f}")
-
- if regularity_index < 0.3:
- st.write("- Pattern: **Regular Grid** 🟢")
- elif regularity_index < 0.7:
- st.write("- Pattern: **Semi-Regular** 🟡")
- else:
- st.write("- Pattern: **Irregular/Random** 🔴")
-
- # Grid quadrant analysis
- st.markdown("### 🗺️ Quadrant Analysis")
-
- # Divide into 4 quadrants
- x_center = (max(x_coords) + min(x_coords)) / 2
- y_center = (max(y_coords) + min(y_coords)) / 2
-
- quadrants = {"NE": [], "NW": [], "SE": [], "SW": []}
-
- for i, (x, y) in enumerate(node_coords):
- if x >= x_center and y >= y_center:
- quadrants["NE"].append(i)
- elif x < x_center and y >= y_center:
- quadrants["NW"].append(i)
- elif x >= x_center and y < y_center:
- quadrants["SE"].append(i)
- else:
- quadrants["SW"].append(i)
-
- col1, col2, col3, col4 = st.columns(4)
- cols = [col1, col2, col3, col4]
- quad_names = ["NE", "NW", "SE", "SW"]
-
- for i, (quad, nodes_in_quad) in enumerate(quadrants.items()):
- with cols[i]:
- st.metric(f"Quadrant {quad}", len(nodes_in_quad))
- density_quad = (
- len(nodes_in_quad) / (grid_area / 4)
- if grid_area > 0
- else 0
- )
- st.write(f"Density: {density_quad:.1f}/km²")
-
- # Grid coverage heatmap
- st.markdown("### 🔥 Grid Coverage Heatmap")
-
- # Create grid for heatmap
- grid_size = 20
- x_bins = np.linspace(min(x_coords), max(x_coords), grid_size)
- y_bins = np.linspace(min(y_coords), max(y_coords), grid_size)
-
- # Count nodes in each grid cell
- heatmap_data = np.zeros((grid_size - 1, grid_size - 1))
- for x, y in zip(x_coords, y_coords):
- x_idx = np.digitize(x, x_bins) - 1
- y_idx = np.digitize(y, y_bins) - 1
-
- # Ensure indices are within bounds
- x_idx = max(0, min(x_idx, grid_size - 2))
- y_idx = max(0, min(y_idx, grid_size - 2))
-
- heatmap_data[y_idx, x_idx] += 1
-
- # Create heatmap
- fig_heatmap = px.imshow(
- heatmap_data,
- title="Node Density Heatmap",
- labels={
- "x": "East-West Grid",
- "y": "North-South Grid",
- "color": "Node Count",
- },
- color_continuous_scale="Viridis",
- )
- st.plotly_chart(fig_heatmap, use_container_width=True)
-
- # Scatter plot with quadrant overlay
- fig_scatter = px.scatter(
- x=x_coords,
- y=y_coords,
- title="Grid Spatial Distribution with Quadrants",
- labels={"x": "UTM Easting (m)", "y": "UTM Northing (m)"},
- )
-
- # Add quadrant lines
- fig_scatter.add_vline(
- x=x_center,
- line_dash="dash",
- line_color="red",
- annotation_text="E-W Center",
- )
- fig_scatter.add_hline(
- y=y_center,
- line_dash="dash",
- line_color="red",
- annotation_text="N-S Center",
- )
-
- fig_scatter.update_traces(marker=dict(size=6, opacity=0.7))
- st.plotly_chart(fig_scatter, use_container_width=True)
-
- except Exception as e:
- st.error(f"Error in grid analysis: {str(e)}")
-
- with adv_tab5:
- st.markdown("### 🚛 Traveling Salesman Problem (TSP)")
- st.markdown(
- """
- Optimasi rute untuk maintenance dan inspeksi jaringan listrik,
- mencari jalur terpendek yang mengunjungi semua node penting.
- """
- )
-
- # TSP parameters
- col1, col2 = st.columns(2)
- with col1:
- max_tsp_nodes = st.slider(
- "Jumlah Node untuk TSP", 5, min(25, G.number_of_nodes()), 12
- )
- with col2:
- selection_method = st.selectbox(
- "Metode Seleksi Node:",
- ["Highest Degree", "Random", "Geographic Spread", "Critical Nodes"],
- )
-
- try:
- # Select nodes based on method
- if selection_method == "Highest Degree":
- degrees = dict(G.degree())
- selected_nodes = sorted(
- degrees.keys(), key=lambda x: degrees[x], reverse=True
- )[:max_tsp_nodes]
- st.write(f"**Selected: Top {len(selected_nodes)} nodes by degree**")
-
- elif selection_method == "Random":
- selected_nodes = list(
- np.random.choice(
- list(G.nodes()),
- size=min(max_tsp_nodes, G.number_of_nodes()),
- replace=False,
- )
- )
- st.write(f"**Selected: {len(selected_nodes)} random nodes**")
-
- elif selection_method == "Geographic Spread":
- # Select nodes that are geographically spread
- node_coords = {
- node: (G.nodes[node]["x"], G.nodes[node]["y"])
- for node in G.nodes()
- }
- selected_nodes = [list(G.nodes())[0]] # Start with first node
-
- for _ in range(min(max_tsp_nodes - 1, G.number_of_nodes() - 1)):
- max_min_dist = 0
- best_node = None
-
- for candidate in G.nodes():
- if candidate not in selected_nodes:
- min_dist_to_selected = min(
- [
- np.sqrt(
- (
- node_coords[candidate][0]
- - node_coords[selected][0]
- )
- ** 2
- + (
- node_coords[candidate][1]
- - node_coords[selected][1]
- )
- ** 2
- )
- for selected in selected_nodes
- ]
- )
-
- if min_dist_to_selected > max_min_dist:
- max_min_dist = min_dist_to_selected
- best_node = candidate
-
- if best_node:
- selected_nodes.append(best_node)
-
- st.write(
- f"**Selected: {len(selected_nodes)} geographically spread nodes**"
- )
-
- else: # Critical Nodes
- # Select based on combination of degree and betweenness centrality
- centrality = centrality_measures
- critical_scores = {}
-
- for node in G.nodes():
- degree_cent = centrality["degree"].get(node, 0)
- between_cent = centrality["betweenness"].get(node, 0)
- critical_scores[node] = degree_cent + between_cent
-
- selected_nodes = sorted(
- critical_scores.keys(),
- key=lambda x: critical_scores[x],
- reverse=True,
- )[:max_tsp_nodes]
- st.write(f"**Selected: Top {len(selected_nodes)} critical nodes**")
-
- # Display selected nodes info
- col1, col2 = st.columns(2)
- with col1:
- st.write("**Top Selected Nodes:**")
- degrees = dict(G.degree())
- for i, node in enumerate(selected_nodes[:8]):
- st.write(f"- Node {node}: degree {degrees[node]}")
- if len(selected_nodes) > 8:
- st.write(f"... dan {len(selected_nodes)-8} node lainnya")
-
- with col2:
- # Calculate some basic stats for selected nodes
- selected_degrees = [degrees[node] for node in selected_nodes]
- st.write("**Selection Statistics:**")
- st.write(f"- Total nodes: {len(selected_nodes)}")
- st.write(f"- Avg degree: {np.mean(selected_degrees):.1f}")
- st.write(f"- Max degree: {max(selected_degrees)}")
- st.write(f"- Min degree: {min(selected_degrees)}")
-
- # TSP Algorithm implementations
- tsp_algo_tab1, tsp_algo_tab2, tsp_algo_tab3 = st.tabs(
- ["🎯 Nearest Neighbor", "🔄 2-Opt", "🧬 Genetic Algorithm"]
- )
-
- with tsp_algo_tab1:
- st.markdown("**Nearest Neighbor Heuristic**")
-
- # Nearest neighbor TSP
- if len(selected_nodes) > 2:
- start_node = selected_nodes[0]
- tsp_tour = [start_node]
- current = start_node
- unvisited = set(selected_nodes[1:])
- total_distance = 0
- tour_edges = []
-
- while unvisited:
- nearest = None
- min_dist = float("inf")
-
- # Find nearest unvisited node
- for next_node in unvisited:
- try:
- # Calculate shortest path distance
- path_length = nx.shortest_path_length(
- G, current, next_node, weight="weight"
- )
- if path_length < min_dist:
- min_dist = path_length
- nearest = next_node
- except nx.NetworkXNoPath:
- continue
-
- if nearest:
- tsp_tour.append(nearest)
- tour_edges.append((current, nearest, min_dist))
- total_distance += min_dist
- current = nearest
- unvisited.remove(nearest)
- else:
- # No path found, take first available
- if unvisited:
- nearest = list(unvisited)[0]
- tsp_tour.append(nearest)
- unvisited.remove(nearest)
- current = nearest
-
- # Return to start
- try:
- return_dist = nx.shortest_path_length(
- G, current, start_node, weight="weight"
- )
- total_distance += return_dist
- tour_edges.append((current, start_node, return_dist))
- except nx.NetworkXNoPath:
- st.warning("Cannot return to start node (disconnected)")
-
- col1, col2, col3 = st.columns(3)
- with col1:
- st.metric("Total TSP Distance", f"{total_distance:.2f}m")
- with col2:
- st.metric("Nodes in Tour", len(tsp_tour))
- with col3:
- avg_hop = (
- total_distance / len(tsp_tour)
- if len(tsp_tour) > 0
- else 0
- )
- st.metric("Average Hop", f"{avg_hop:.2f}m")
-
- # Tour sequence
- st.markdown("**TSP Tour Sequence (Nearest Neighbor):**")
- tour_text = " → ".join([str(node) for node in tsp_tour[:15]])
- if len(tsp_tour) > 15:
- tour_text += " → ..."
- tour_text += f" → {start_node}"
- st.code(tour_text)
-
- # Tour efficiency analysis
- if connectivity_analysis.get("total_network_length_m", 0) > 0:
- efficiency = (
- total_distance
- / connectivity_analysis["total_network_length_m"]
- ) * 100
- st.write(
- f"**Tour Efficiency:** {efficiency:.1f}% of total network length"
- )
-
- with tsp_algo_tab2:
- st.markdown("**2-Opt Improvement Heuristic**")
-
- if "tsp_tour" in locals() and len(tsp_tour) > 3:
- st.write(
- "Menerapkan 2-Opt improvement pada hasil Nearest Neighbor..."
- )
-
- # Simple 2-opt improvement (limited iterations for performance)
- improved_tour = tsp_tour.copy()
- improved_distance = total_distance
- max_iterations = min(50, len(tsp_tour) * 2)
- improvements = 0
-
- for iteration in range(max_iterations):
- best_improvement = 0
- best_i, best_j = -1, -1
-
- for i in range(1, len(improved_tour) - 2):
- for j in range(i + 1, len(improved_tour)):
- if j - i == 1:
- continue # Skip adjacent edges
-
- # Calculate current edge distances
- try:
- current_dist = nx.shortest_path_length(
- G,
- improved_tour[i - 1],
- improved_tour[i],
- weight="weight",
- ) + nx.shortest_path_length(
- G,
- improved_tour[j - 1],
- improved_tour[j % len(improved_tour)],
- weight="weight",
- )
-
- # Calculate new edge distances after 2-opt swap
- new_dist = nx.shortest_path_length(
- G,
- improved_tour[i - 1],
- improved_tour[j - 1],
- weight="weight",
- ) + nx.shortest_path_length(
- G,
- improved_tour[i],
- improved_tour[j % len(improved_tour)],
- weight="weight",
- )
-
- improvement = current_dist - new_dist
- if improvement > best_improvement:
- best_improvement = improvement
- best_i, best_j = i, j
-
- except nx.NetworkXNoPath:
- continue
-
- # Apply best improvement
- if best_improvement > 0:
- improved_tour[best_i:best_j] = reversed(
- improved_tour[best_i:best_j]
- )
- improved_distance -= best_improvement
- improvements += 1
- else:
- break # No more improvements found
-
- col1, col2, col3 = st.columns(3)
- with col1:
- st.metric("Improved Distance", f"{improved_distance:.2f}m")
- improvement_pct = (
- (
- (total_distance - improved_distance)
- / total_distance
- * 100
- )
- if total_distance > 0
- else 0
- )
- st.metric("Improvement", f"{improvement_pct:.1f}%")
- with col2:
- st.metric("Iterations Run", iteration + 1)
- st.metric("Improvements Made", improvements)
- with col3:
- distance_saved = total_distance - improved_distance
- st.metric("Distance Saved", f"{distance_saved:.2f}m")
-
- if improvements > 0:
- st.success(
- f"✅ 2-Opt found {improvements} improvements in {iteration + 1} iterations"
- )
-
- # Show improved tour
- st.markdown("**Improved TSP Tour (2-Opt):**")
- improved_text = " → ".join(
- [str(node) for node in improved_tour[:15]]
- )
- if len(improved_tour) > 15:
- improved_text += " → ..."
- st.code(improved_text)
- else:
- st.info("ℹ️ No improvements found with 2-Opt")
- else:
- st.warning("Run Nearest Neighbor first to get initial tour")
-
- with tsp_algo_tab3:
- st.markdown("**Genetic Algorithm (Simplified)**")
-
- if len(selected_nodes) > 4:
- st.write(
- "Implementasi Genetic Algorithm sederhana untuk TSP..."
- )
-
- # GA parameters
- population_size = min(20, len(selected_nodes) * 2)
- generations = min(50, len(selected_nodes) * 3)
- mutation_rate = 0.1
-
- col1, col2, col3 = st.columns(3)
- with col1:
- st.write(f"Population Size: {population_size}")
- with col2:
- st.write(f"Generations: {generations}")
- with col3:
- st.write(f"Mutation Rate: {mutation_rate}")
-
- # Initialize population with random tours
- population = []
- for _ in range(population_size):
- tour = selected_nodes.copy()
- np.random.shuffle(tour)
- population.append(tour)
-
- # Evolution progress tracking
- best_distances = []
- avg_distances = []
-
- def calculate_tour_distance(tour):
- total_dist = 0
- for i in range(len(tour)):
- current = tour[i]
- next_node = tour[(i + 1) % len(tour)]
- try:
- dist = nx.shortest_path_length(
- G, current, next_node, weight="weight"
- )
- total_dist += dist
- except nx.NetworkXNoPath:
- total_dist += (
- 10000 # Penalty for disconnected nodes
- )
- return total_dist
-
- # Run GA (simplified version)
- progress_bar = st.progress(0)
- for gen in range(generations):
- # Evaluate population
- fitness_scores = []
- for tour in population:
- distance = calculate_tour_distance(tour)
- fitness_scores.append(
- 1 / (1 + distance)
- ) # Fitness = 1/(1+distance)
-
- # Track progress
- distances = [1 / f - 1 for f in fitness_scores]
- best_distances.append(min(distances))
- avg_distances.append(np.mean(distances))
-
- # Selection (tournament selection)
- new_population = []
- for _ in range(population_size):
- # Tournament selection
- tournament_size = 3
- tournament_indices = np.random.choice(
- len(population), tournament_size, replace=False
- )
- winner_idx = max(
- tournament_indices, key=lambda i: fitness_scores[i]
- )
- new_population.append(population[winner_idx].copy())
-
- # Crossover and mutation (simplified)
- for i in range(0, len(new_population), 2):
- if (
- i + 1 < len(new_population)
- and np.random.random() < 0.8
- ): # Crossover probability
- # Order crossover (simplified)
- parent1, parent2 = (
- new_population[i],
- new_population[i + 1],
- )
- cut1, cut2 = sorted(
- np.random.choice(len(parent1), 2, replace=False)
- )
-
- child1 = [-1] * len(parent1)
- child1[cut1:cut2] = parent1[cut1:cut2]
-
- # Fill remaining positions
- remaining = [x for x in parent2 if x not in child1]
- j = 0
- for k in range(len(child1)):
- if child1[k] == -1:
- child1[k] = remaining[j]
- j += 1
-
- new_population[i] = child1
-
- # Mutation
- for tour in new_population:
- if np.random.random() < mutation_rate:
- # Swap mutation
- i, j = np.random.choice(len(tour), 2, replace=False)
- tour[i], tour[j] = tour[j], tour[i]
-
- population = new_population
- progress_bar.progress((gen + 1) / generations)
-
- # Get best solution
- final_fitness = [
- calculate_tour_distance(tour) for tour in population
- ]
- best_idx = np.argmin(final_fitness)
- best_ga_tour = population[best_idx]
- best_ga_distance = final_fitness[best_idx]
-
- col1, col2 = st.columns(2)
- with col1:
- st.metric("Best GA Distance", f"{best_ga_distance:.2f}m")
- if "improved_distance" in locals():
- ga_improvement = (
- (
- (improved_distance - best_ga_distance)
- / improved_distance
- * 100
- )
- if improved_distance > 0
- else 0
- )
- st.metric("vs 2-Opt", f"{ga_improvement:.1f}%")
- with col2:
- st.metric("Generations Run", generations)
- initial_best = best_distances[0] if best_distances else 0
- final_best = best_distances[-1] if best_distances else 0
- total_improvement = (
- ((initial_best - final_best) / initial_best * 100)
- if initial_best > 0
- else 0
- )
- st.metric(
- "Total GA Improvement", f"{total_improvement:.1f}%"
- )
-
- # Evolution progress chart
- if best_distances:
- fig_evolution = px.line(
- x=list(range(len(best_distances))),
- y=best_distances,
- title="Genetic Algorithm Evolution",
- labels={"x": "Generation", "y": "Best Distance (m)"},
- )
- st.plotly_chart(fig_evolution, use_container_width=True)
-
- # Best GA tour
- st.markdown("**Best GA Tour:**")
- ga_tour_text = " → ".join(
- [str(node) for node in best_ga_tour[:15]]
- )
- if len(best_ga_tour) > 15:
- ga_tour_text += " → ..."
- st.code(ga_tour_text)
-
- else:
- st.warning("Need at least 5 nodes for meaningful GA")
-
- # TSP Visualization
- if len(selected_nodes) > 2:
- st.markdown("### 🎨 TSP Subgraph Visualization")
- tsp_subgraph = G.subgraph(selected_nodes).copy()
-
- if tsp_subgraph.number_of_nodes() > 0:
- tsp_centrality = calculate_centrality_measures(tsp_subgraph)
- tsp_fig = create_network_visualization(
- tsp_subgraph,
- st.session_state["nodes"],
- tsp_centrality,
- show_labels=True,
- )
- tsp_fig.update_layout(
- title=dict(
- text=f"TSP Subgraph - {len(selected_nodes)} Selected Nodes",
- font=dict(size=16),
- )
- )
- st.plotly_chart(tsp_fig, use_container_width=True)
-
- except Exception as e:
- st.error(f"Error in TSP analysis: {str(e)}")
-
- with adv_tab6:
- st.markdown("### 🎲 Generative Graph Model")
- st.markdown(
- """
- Membuat model generatif untuk mensimulasikan pertumbuhan jaringan listrik
- berdasarkan karakteristik jaringan existing.
- """
- )
-
- try:
- # Parameters for generative models
- col1, col2 = st.columns(2)
- with col1:
- model_type = st.selectbox(
- "Pilih Model Generatif:",
- [
- "Erdős–Rényi",
- "Barabási–Albert",
- "Watts-Strogatz",
- "Geographic",
- ],
- )
- with col2:
- num_generated_nodes = st.slider(
- "Jumlah Node Generated",
- 50,
- min(500, G.number_of_nodes() * 2),
- G.number_of_nodes(),
- )
-
- gen_tab1, gen_tab2, gen_tab3, gen_tab4 = st.tabs(
- ["🎲 Random", "🌟 Scale-Free", "🔄 Small-World", "🗺️ Geographic"]
+ else:
+ st.warning(
+ "Tidak dapat membuat MST - graf mungkin tidak terhubung atau kosong"
)
- with gen_tab1:
- st.markdown("**Erdős–Rényi Random Graph Model**")
-
- # Calculate probability from existing graph
- n = G.number_of_nodes()
- m = G.number_of_edges()
- p_original = (2 * m) / (n * (n - 1)) if n > 1 else 0
-
- p_param = st.slider(
- "Edge Probability", 0.001, 0.1, min(p_original, 0.05), 0.001
- )
-
- # Generate ER graph
- er_graph = nx.erdos_renyi_graph(num_generated_nodes, p_param)
-
- # Add random weights to edges
- for u, v in er_graph.edges():
- # Weight based on original network statistics
- if connectivity_analysis.get("avg_segment_length_m", 0) > 0:
- avg_length = connectivity_analysis["avg_segment_length_m"]
- std_length = avg_length * 0.5 # Assume 50% variation
- weight = max(10, np.random.normal(avg_length, std_length))
- else:
- weight = np.random.uniform(50, 500) # Default range
- er_graph[u][v]["weight"] = weight
-
- # Add positions for visualization
- pos_er = nx.spring_layout(er_graph)
- for node in er_graph.nodes():
- er_graph.nodes[node]["x"] = pos_er[node][0] * 1000
- er_graph.nodes[node]["y"] = pos_er[node][1] * 1000
-
- # Compare with original
- col1, col2, col3 = st.columns(3)
- with col1:
- st.metric("Generated Nodes", er_graph.number_of_nodes())
- st.metric("Generated Edges", er_graph.number_of_edges())
- with col2:
- er_density = nx.density(er_graph)
- st.metric("Generated Density", f"{er_density:.4f}")
- original_density = connectivity_analysis.get("density", 0)
- st.metric("Original Density", f"{original_density:.4f}")
- with col3:
- er_avg_degree = np.mean([d for n, d in er_graph.degree()])
- st.metric("Generated Avg Degree", f"{er_avg_degree:.2f}")
- original_avg_degree = connectivity_analysis.get("avg_degree", 0)
- st.metric("Original Avg Degree", f"{original_avg_degree:.2f}")
-
- # Visualize ER graph
- if er_graph.number_of_nodes() > 0:
- er_centrality = calculate_centrality_measures(er_graph)
- er_fig = create_network_visualization(
- er_graph,
- list(pos_er.values()),
- er_centrality,
- show_labels=False,
- )
- er_fig.update_layout(
- title=dict(
- text=f"Erdős–Rényi Generated Graph (p={p_param})",
- font=dict(size=16),
- )
- )
- st.plotly_chart(er_fig, use_container_width=True)
-
- st.markdown("**Karakteristik ER Model:**")
- st.write(
- "- Setiap edge memiliki probabilitas independen untuk terbentuk"
- )
- st.write(
- "- Distribusi degree mengikuti binomial (mendekati Poisson)"
- )
- st.write("- Tidak ada struktur hierarkis atau geografis")
-
- with gen_tab2:
- st.markdown("**Barabási–Albert Scale-Free Model**")
-
- # Parameters for BA model
- m_param = st.slider(
- "Edges per new node (m)",
- 1,
- min(10, G.number_of_nodes() // 4),
- 3,
- )
-
- # Generate BA graph
- ba_graph = nx.barabasi_albert_graph(num_generated_nodes, m_param)
-
- # Add random weights
- for u, v in ba_graph.edges():
- if connectivity_analysis.get("avg_segment_length_m", 0) > 0:
- avg_length = connectivity_analysis["avg_segment_length_m"]
- std_length = avg_length * 0.5
- weight = max(10, np.random.normal(avg_length, std_length))
- else:
- weight = np.random.uniform(50, 500)
- ba_graph[u][v]["weight"] = weight
-
- # Add positions
- pos_ba = nx.spring_layout(ba_graph, k=0.5, iterations=50)
- for node in ba_graph.nodes():
- ba_graph.nodes[node]["x"] = pos_ba[node][0] * 1000
- ba_graph.nodes[node]["y"] = pos_ba[node][1] * 1000
-
- # Analyze degree distribution
- ba_degrees = [d for n, d in ba_graph.degree()]
- original_degrees = [d for n, d in G.degree()]
-
- col1, col2 = st.columns(2)
- with col1:
- st.metric("Generated Max Degree", max(ba_degrees))
- st.metric("Generated Min Degree", min(ba_degrees))
-
- # Check for power law characteristics
- degree_counts = {}
- for deg in ba_degrees:
- degree_counts[deg] = degree_counts.get(deg, 0) + 1
-
- high_degree_nodes = len(
- [
- d
- for d in ba_degrees
- if d > np.mean(ba_degrees) + 2 * np.std(ba_degrees)
- ]
- )
- st.metric("High-Degree Hubs", high_degree_nodes)
-
- with col2:
- st.metric(
- "Original Max Degree",
- max(original_degrees) if original_degrees else 0,
- )
- st.metric(
- "Original Min Degree",
- min(original_degrees) if original_degrees else 0,
- )
-
- original_high_degree = len(
- [
- d
- for d in original_degrees
- if d
- > np.mean(original_degrees)
- + 2 * np.std(original_degrees)
- ]
- )
- st.metric("Original Hubs", original_high_degree)
-
- # Degree distribution comparison
- fig_degree_comparison = px.histogram(
- x=ba_degrees + original_degrees,
- color=["Generated"] * len(ba_degrees)
- + ["Original"] * len(original_degrees),
- title="Degree Distribution Comparison",
- labels={"x": "Degree", "y": "Count"},
- barmode="overlay",
- opacity=0.7,
- )
- st.plotly_chart(fig_degree_comparison, use_container_width=True)
-
- # Visualize BA graph
- if ba_graph.number_of_nodes() > 0:
- ba_centrality = calculate_centrality_measures(ba_graph)
- ba_fig = create_network_visualization(
- ba_graph,
- list(pos_ba.values()),
- ba_centrality,
- show_labels=False,
- )
- ba_fig.update_layout(
- title=dict(
- text=f"Barabási–Albert Generated Graph (m={m_param})",
- font=dict(size=16),
- )
- )
- st.plotly_chart(ba_fig, use_container_width=True)
-
- st.markdown("**Karakteristik BA Model:**")
- st.write("- 'Rich get richer' - preferential attachment")
- st.write("- Power-law degree distribution (scale-free)")
- st.write("- Emergence of hub nodes dengan degree tinggi")
-
- with gen_tab3:
- st.markdown("**Watts-Strogatz Small-World Model**")
-
- # Parameters for WS model
- col1, col2 = st.columns(2)
- with col1:
- k_param = st.slider(
- "Initial degree (k)",
- 2,
- min(20, num_generated_nodes // 5),
- 6,
- )
- with col2:
- p_rewire = st.slider("Rewiring probability", 0.0, 1.0, 0.3, 0.1)
-
- # Generate WS graph
- try:
- ws_graph = nx.watts_strogatz_graph(
- num_generated_nodes, k_param, p_rewire
- )
-
- # Add random weights
- for u, v in ws_graph.edges():
- if connectivity_analysis.get("avg_segment_length_m", 0) > 0:
- avg_length = connectivity_analysis[
- "avg_segment_length_m"
- ]
- std_length = avg_length * 0.4
- weight = max(
- 10, np.random.normal(avg_length, std_length)
- )
- else:
- weight = np.random.uniform(50, 500)
- ws_graph[u][v]["weight"] = weight
-
- # Add positions (circular layout for better small-world visualization)
- pos_ws = nx.circular_layout(ws_graph)
- for node in ws_graph.nodes():
- ws_graph.nodes[node]["x"] = pos_ws[node][0] * 1000
- ws_graph.nodes[node]["y"] = pos_ws[node][1] * 1000
-
- # Calculate small-world metrics
- col1, col2, col3 = st.columns(3)
- with col1:
- try:
- ws_clustering = nx.average_clustering(ws_graph)
- st.metric(
- "Generated Clustering", f"{ws_clustering:.4f}"
- )
- except:
- st.metric("Generated Clustering", "N/A")
-
- with col2:
- try:
- if nx.is_connected(ws_graph):
- ws_path_length = nx.average_shortest_path_length(
- ws_graph
- )
- st.metric(
- "Generated Path Length", f"{ws_path_length:.2f}"
- )
- else:
- st.metric("Generated Path Length", "N/A")
- except:
- st.metric("Generated Path Length", "N/A")
-
- with col3:
- # Small-world coefficient (simplified)
- try:
- original_clustering = (
- nx.average_clustering(G)
- if G.number_of_nodes() > 0
- else 0
- )
- st.metric(
- "Original Clustering", f"{original_clustering:.4f}"
- )
- except:
- st.metric("Original Clustering", "N/A")
-
- # Rewiring analysis
- st.markdown("**Rewiring Analysis:**")
- rewired_edges = 0
- total_edges = ws_graph.number_of_edges()
- expected_rewired = int(
- total_edges * p_rewire / 2
- ) # Approximate
-
- st.write(f"- Expected rewired edges: ~{expected_rewired}")
- st.write(f"- Rewiring probability: {p_rewire}")
- st.write(f"- Total edges: {total_edges}")
-
- if p_rewire < 0.1:
- st.write("- Network type: **Regular** (low rewiring)")
- elif p_rewire > 0.5:
- st.write("- Network type: **Random** (high rewiring)")
- else:
- st.write(
- "- Network type: **Small-World** (optimal rewiring)"
- )
-
- # Visualize WS graph
- if ws_graph.number_of_nodes() > 0:
- ws_centrality = calculate_centrality_measures(ws_graph)
- ws_fig = create_network_visualization(
- ws_graph,
- list(pos_ws.values()),
- ws_centrality,
- show_labels=False,
- )
- ws_fig.update_layout(
- title=dict(
- text=f"Watts-Strogatz Generated Graph (k={k_param}, p={p_rewire})",
- font=dict(size=16),
- )
- )
- st.plotly_chart(ws_fig, use_container_width=True)
-
- st.markdown("**Karakteristik WS Model:**")
- st.write("- High clustering seperti regular networks")
- st.write("- Short path lengths seperti random networks")
- st.write(
- "- 'Small-world' phenomenon: globally connected, locally clustered"
- )
-
- except Exception as e:
- st.error(f"Error generating WS graph: {str(e)}")
-
- with gen_tab4:
- st.markdown("**Geographic/Spatial Network Model**")
-
- # Parameters for geographic model
- col1, col2 = st.columns(2)
- with col1:
- connection_radius = st.slider(
- "Connection Radius (m)", 100, 2000, 500, 50
- )
- with col2:
- spatial_distribution = st.selectbox(
- "Spatial Distribution",
- ["Uniform", "Clustered", "Grid-like"],
- )
-
- try:
- # Generate geographic positions
- if G.number_of_nodes() > 0:
- # Use bounding box from original network
- node_coords = [
- (G.nodes[node]["x"], G.nodes[node]["y"])
- for node in G.nodes()
- ]
- x_coords = [coord[0] for coord in node_coords]
- y_coords = [coord[1] for coord in node_coords]
-
- x_min, x_max = min(x_coords), max(x_coords)
- y_min, y_max = min(y_coords), max(y_coords)
-
- # Expand bounding box slightly
- x_range = x_max - x_min
- y_range = y_max - y_min
- x_min -= x_range * 0.1
- x_max += x_range * 0.1
- y_min -= y_range * 0.1
- y_max += y_range * 0.1
- else:
- x_min, x_max = 0, 10000
- y_min, y_max = 0, 10000
-
- # Generate positions based on distribution type
- geographic_graph = nx.Graph()
- positions = {}
-
- if spatial_distribution == "Uniform":
- for i in range(num_generated_nodes):
- x = np.random.uniform(x_min, x_max)
- y = np.random.uniform(y_min, y_max)
- positions[i] = (x, y)
- geographic_graph.add_node(i, x=x, y=y)
-
- elif spatial_distribution == "Clustered":
- # Generate several cluster centers
- num_clusters = max(3, num_generated_nodes // 20)
- cluster_centers = []
- for _ in range(num_clusters):
- cx = np.random.uniform(x_min, x_max)
- cy = np.random.uniform(y_min, y_max)
- cluster_centers.append((cx, cy))
-
- for i in range(num_generated_nodes):
- # Choose random cluster
- cluster = np.random.choice(len(cluster_centers))
- cx, cy = cluster_centers[cluster]
-
- # Add noise around cluster center
- cluster_radius = min(x_max - x_min, y_max - y_min) / (
- num_clusters * 2
- )
- x = cx + np.random.normal(0, cluster_radius / 3)
- y = cy + np.random.normal(0, cluster_radius / 3)
-
- # Keep within bounds
- x = np.clip(x, x_min, x_max)
- y = np.clip(y, y_min, y_max)
-
- positions[i] = (x, y)
- geographic_graph.add_node(i, x=x, y=y)
-
- else: # Grid-like
- grid_size = int(np.sqrt(num_generated_nodes))
- x_step = (x_max - x_min) / grid_size
- y_step = (y_max - y_min) / grid_size
-
- node_id = 0
- for i in range(grid_size):
- for j in range(grid_size):
- if node_id >= num_generated_nodes:
- break
-
- # Grid position with some random jitter
- x = (
- x_min
- + i * x_step
- + np.random.normal(0, x_step / 4)
- )
- y = (
- y_min
- + j * y_step
- + np.random.normal(0, y_step / 4)
- )
-
- positions[node_id] = (x, y)
- geographic_graph.add_node(node_id, x=x, y=y)
- node_id += 1
-
- # Connect nodes within radius
- nodes_list = list(geographic_graph.nodes())
- edges_added = 0
-
- for i in range(len(nodes_list)):
- for j in range(i + 1, len(nodes_list)):
- node1, node2 = nodes_list[i], nodes_list[j]
- x1, y1 = positions[node1]
- x2, y2 = positions[node2]
-
- distance = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)
-
- if distance <= connection_radius:
- geographic_graph.add_edge(
- node1, node2, weight=distance
- )
- edges_added += 1
-
- # Statistics
- col1, col2, col3 = st.columns(3)
- with col1:
- st.metric(
- "Generated Nodes", geographic_graph.number_of_nodes()
- )
- st.metric(
- "Generated Edges", geographic_graph.number_of_edges()
- )
- with col2:
- geo_density = nx.density(geographic_graph)
- st.metric("Network Density", f"{geo_density:.4f}")
-
- # Calculate average node degree
- if geographic_graph.number_of_nodes() > 0:
- avg_degree_geo = np.mean(
- [d for n, d in geographic_graph.degree()]
- )
- st.metric("Average Degree", f"{avg_degree_geo:.2f}")
- with col3:
- st.metric("Connection Radius", f"{connection_radius}m")
- st.metric("Distribution Type", spatial_distribution)
-
- # Connectivity analysis
- geo_components = nx.number_connected_components(
- geographic_graph
- )
- if geo_components > 1:
- st.warning(
- f"⚠️ Network has {geo_components} disconnected components"
- )
- st.write(
- "Consider increasing connection radius for better connectivity"
- )
- else:
- st.success("✅ Network is fully connected")
-
- # Distance distribution
- if geographic_graph.number_of_edges() > 0:
- edge_distances = [
- data["weight"]
- for u, v, data in geographic_graph.edges(data=True)
- ]
-
- fig_distances = px.histogram(
- x=edge_distances,
- title="Geographic Edge Distance Distribution",
- labels={"x": "Distance (m)", "y": "Count"},
- nbins=20,
- )
- st.plotly_chart(fig_distances, use_container_width=True)
-
- # Visualize geographic graph
- if geographic_graph.number_of_nodes() > 0:
- geo_centrality = calculate_centrality_measures(
- geographic_graph
- )
-
- # Use actual positions for visualization
- geo_nodes_for_viz = [
- positions[node] for node in geographic_graph.nodes()
- ]
-
- geo_fig = create_network_visualization(
- geographic_graph,
- geo_nodes_for_viz,
- geo_centrality,
- show_labels=False,
- )
- geo_fig.update_layout(
- title=dict(
- text=f"Geographic Generated Graph ({spatial_distribution}, r={connection_radius}m)",
- font=dict(size=16),
- )
- )
- st.plotly_chart(geo_fig, use_container_width=True)
-
- st.markdown("**Karakteristik Geographic Model:**")
- st.write("- Connections based on spatial proximity")
- st.write("- Realistic for infrastructure networks")
- st.write("- Clustering depends on spatial distribution")
-
- # Spatial statistics
- if len(edge_distances) > 0:
- st.write(
- f"- Average connection distance: {np.mean(edge_distances):.1f}m"
- )
- st.write(
- f"- Max connection distance: {max(edge_distances):.1f}m"
- )
- st.write(
- f"- Connection efficiency: {len(edge_distances)/len(nodes_list):.2f} edges/node"
- )
-
- except Exception as e:
- st.error(f"Error generating geographic graph: {str(e)}")
-
- # Model comparison summary
- st.markdown("### 📊 Generative Models Summary")
-
- comparison_text = """
- | Model | Best For | Characteristics | Use Case |
- |-------|----------|-----------------|----------|
- | **Erdős–Rényi** | Baseline comparison | Random, no structure | Null model testing |
- | **Barabási–Albert** | Hub-based networks | Scale-free, hubs | Social networks, internet |
- | **Watts-Strogatz** | Balanced networks | Small-world, clustering | Neural networks, social |
- | **Geographic** | Infrastructure | Spatial constraints | Power grids, roads |
- """
- st.markdown(comparison_text)
-
- st.markdown("### 💡 Rekomendasi Model")
- original_density = connectivity_analysis.get("density", 0)
- original_clustering = 0
- try:
- original_clustering = nx.average_clustering(G)
- except:
- pass
-
- if original_density < 0.01 and original_clustering > 0.3:
- st.success(
- "🎯 **Rekomendasi: Geographic Model** - Low density dengan clustering tinggi cocok untuk infrastructure networks"
- )
- elif (
- connectivity_analysis.get("max_degree", 0)
- > connectivity_analysis.get("avg_degree", 0) * 3
- ):
- st.success(
- "🎯 **Rekomendasi: Barabási–Albert Model** - Ada hub nodes yang dominan menunjukkan scale-free properties"
- )
- elif original_clustering > 0.5:
- st.success(
- "🎯 **Rekomendasi: Watts-Strogatz Model** - High clustering menunjukkan small-world characteristics"
- )
- else:
- st.success(
- "🎯 **Rekomendasi: Erdős–Rényi Model** - Network characteristics relatif random"
- )
-
- except Exception as e:
- st.error(f"Error in generative model analysis: {str(e)}")
-
else:
st.info("👆 Pilih sumber data dan klik tombol untuk memulai analisis")
@@ -3103,6 +1269,26 @@ def main():
"""
)
+ # Troubleshooting section
+ st.markdown("## 🔧 Troubleshooting")
+ st.markdown(
+ """
+ **Jika mengalami masalah:**
+
+ 1. **File tidak terbaca**: Pastikan file ZIP berisi shapefile lengkap (.shp, .shx, .dbf, .prj)
+ 2. **Error permission**: Aplikasi otomatis menangani permission di server
+ 3. **Timeout download**: Coba gunakan upload file jika download dari URL gagal
+ 4. **Visualisasi lambat**: Gunakan pengaturan performa untuk membatasi jumlah node
+ 5. **Memory error**: Coba dengan dataset yang lebih kecil
+ """
+ )
+
if __name__ == "__main__":
- main()
+ try:
+ main()
+ except Exception as e:
+ st.error(f"💥 Application Error: {str(e)}")
+ st.info(
+ "Silakan refresh halaman atau hubungi administrator jika masalah berlanjut."
+ )