Spaces:
Sleeping
Sleeping
Commit ·
113119c
1
Parent(s): 3dce5a4
Fix slow request threshold not applying from UI
Browse files
- Add slow_threshold parameter to LogAnalyzer class
- Pass threshold from Streamlit UI to analyzer
- Update summary table to display actual threshold used
- Slow request count now updates when threshold is changed
Fixes issue where slider was ignored and always used 3000ms.
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude <noreply@anthropic.com>
- app.py +8 -4
- log_parser.py +5 -3
app.py
CHANGED
|
@@ -53,13 +53,17 @@ def format_number(num: int) -> str:
|
|
| 53 |
|
| 54 |
def create_summary_table(stats: dict) -> pd.DataFrame:
|
| 55 |
"""Create summary statistics table."""
|
|
|
|
|
|
|
|
|
|
|
|
|
| 56 |
data = {
|
| 57 |
"Metric": [
|
| 58 |
"Total Requests (before filtering)",
|
| 59 |
"Excluded Requests (HEAD+Zabbix + 401)",
|
| 60 |
"Processed Requests",
|
| 61 |
"Errors (≠ 200, ≠ 401)",
|
| 62 |
-
"Slow Requests (
|
| 63 |
"Peak RPS",
|
| 64 |
"Peak Timestamp",
|
| 65 |
"Avg Response Time (ms)",
|
|
@@ -192,7 +196,7 @@ def create_metrics_comparison(individual_stats: List[dict]) -> go.Figure:
|
|
| 192 |
return fig
|
| 193 |
|
| 194 |
|
| 195 |
-
def process_log_file(file_path: str, service_name: str = None) -> dict:
|
| 196 |
"""Process a single log file and return statistics."""
|
| 197 |
parser = IISLogParser(file_path)
|
| 198 |
if service_name:
|
|
@@ -206,7 +210,7 @@ def process_log_file(file_path: str, service_name: str = None) -> dict:
|
|
| 206 |
return None
|
| 207 |
|
| 208 |
with st.spinner(f"Analyzing {parser.service_name}..."):
|
| 209 |
-
analyzer = LogAnalyzer(df, parser.service_name)
|
| 210 |
|
| 211 |
stats = {
|
| 212 |
"summary": analyzer.get_summary_stats(),
|
|
@@ -280,7 +284,7 @@ def main():
|
|
| 280 |
file_name = uploaded_files[i].name
|
| 281 |
st.subheader(f"📄 {file_name}")
|
| 282 |
|
| 283 |
-
stats = process_log_file(temp_file, None)
|
| 284 |
if stats:
|
| 285 |
all_stats.append(stats)
|
| 286 |
|
|
|
|
| 53 |
|
| 54 |
def create_summary_table(stats: dict) -> pd.DataFrame:
|
| 55 |
"""Create summary statistics table."""
|
| 56 |
+
# Get threshold in seconds for display
|
| 57 |
+
threshold_ms = stats.get("slow_threshold", 3000)
|
| 58 |
+
threshold_display = f">{threshold_ms}ms" if threshold_ms >= 1000 else f">{threshold_ms}ms"
|
| 59 |
+
|
| 60 |
data = {
|
| 61 |
"Metric": [
|
| 62 |
"Total Requests (before filtering)",
|
| 63 |
"Excluded Requests (HEAD+Zabbix + 401)",
|
| 64 |
"Processed Requests",
|
| 65 |
"Errors (≠ 200, ≠ 401)",
|
| 66 |
+
f"Slow Requests ({threshold_display})",
|
| 67 |
"Peak RPS",
|
| 68 |
"Peak Timestamp",
|
| 69 |
"Avg Response Time (ms)",
|
|
|
|
| 196 |
return fig
|
| 197 |
|
| 198 |
|
| 199 |
+
def process_log_file(file_path: str, service_name: str = None, slow_threshold: int = 3000) -> dict:
|
| 200 |
"""Process a single log file and return statistics."""
|
| 201 |
parser = IISLogParser(file_path)
|
| 202 |
if service_name:
|
|
|
|
| 210 |
return None
|
| 211 |
|
| 212 |
with st.spinner(f"Analyzing {parser.service_name}..."):
|
| 213 |
+
analyzer = LogAnalyzer(df, parser.service_name, slow_threshold)
|
| 214 |
|
| 215 |
stats = {
|
| 216 |
"summary": analyzer.get_summary_stats(),
|
|
|
|
| 284 |
file_name = uploaded_files[i].name
|
| 285 |
st.subheader(f"📄 {file_name}")
|
| 286 |
|
| 287 |
+
stats = process_log_file(temp_file, None, slow_threshold)
|
| 288 |
if stats:
|
| 289 |
all_stats.append(stats)
|
| 290 |
|
log_parser.py
CHANGED
|
@@ -132,9 +132,10 @@ class IISLogParser:
|
|
| 132 |
class LogAnalyzer:
|
| 133 |
"""Analyze parsed IIS logs and generate performance metrics."""
|
| 134 |
|
| 135 |
-
def __init__(self, df: pl.DataFrame, service_name: str = "Unknown"):
|
| 136 |
self.df = df
|
| 137 |
self.service_name = service_name
|
|
|
|
| 138 |
self._filtered_df = None
|
| 139 |
|
| 140 |
def filter_logs(self) -> pl.DataFrame:
|
|
@@ -180,8 +181,8 @@ class LogAnalyzer:
|
|
| 180 |
(pl.col("sc_status") != 200) & (pl.col("sc_status") != 401)
|
| 181 |
).height
|
| 182 |
|
| 183 |
-
# Count slow requests (
|
| 184 |
-
slow_requests = df.filter(pl.col("time_taken") >
|
| 185 |
|
| 186 |
# Response time statistics
|
| 187 |
time_stats = df.select([
|
|
@@ -201,6 +202,7 @@ class LogAnalyzer:
|
|
| 201 |
"total_requests_after": total_after,
|
| 202 |
"errors": errors,
|
| 203 |
"slow_requests": slow_requests,
|
|
|
|
| 204 |
"min_time_ms": int(time_stats["min_time"]) if time_stats["min_time"] else 0,
|
| 205 |
"max_time_ms": int(time_stats["max_time"]) if time_stats["max_time"] else 0,
|
| 206 |
"avg_time_ms": int(time_stats["avg_time"]) if time_stats["avg_time"] else 0,
|
|
|
|
| 132 |
class LogAnalyzer:
|
| 133 |
"""Analyze parsed IIS logs and generate performance metrics."""
|
| 134 |
|
| 135 |
+
def __init__(self, df: pl.DataFrame, service_name: str = "Unknown", slow_threshold: int = 3000):
|
| 136 |
self.df = df
|
| 137 |
self.service_name = service_name
|
| 138 |
+
self.slow_threshold = slow_threshold
|
| 139 |
self._filtered_df = None
|
| 140 |
|
| 141 |
def filter_logs(self) -> pl.DataFrame:
|
|
|
|
| 181 |
(pl.col("sc_status") != 200) & (pl.col("sc_status") != 401)
|
| 182 |
).height
|
| 183 |
|
| 184 |
+
# Count slow requests (using configured threshold)
|
| 185 |
+
slow_requests = df.filter(pl.col("time_taken") > self.slow_threshold).height
|
| 186 |
|
| 187 |
# Response time statistics
|
| 188 |
time_stats = df.select([
|
|
|
|
| 202 |
"total_requests_after": total_after,
|
| 203 |
"errors": errors,
|
| 204 |
"slow_requests": slow_requests,
|
| 205 |
+
"slow_threshold": self.slow_threshold,
|
| 206 |
"min_time_ms": int(time_stats["min_time"]) if time_stats["min_time"] else 0,
|
| 207 |
"max_time_ms": int(time_stats["max_time"]) if time_stats["max_time"] else 0,
|
| 208 |
"avg_time_ms": int(time_stats["avg_time"]) if time_stats["avg_time"] else 0,
|