Fix UI wiggling loop and add daily keep-alive workflow
Browse files- Cache cleaning pipeline with @st.cache_data to avoid redundant reruns
- Guard CSV re-parsing so uploads are only read when the file changes
- Add explicit key= to all sidebar widgets to stabilize widget identity
- Wrap QueryChat UI in @st.fragment to prevent full-page reruns on chat
- Add GitHub Actions workflow (daily 5 UTC) to keep HF Space awake
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
- .github/workflows/keep_alive.yml +35 -0
- .gitignore +6 -0
- Dockerfile +2 -0
- app.py +72 -39
- src/ui_theme.py +2 -3
.github/workflows/keep_alive.yml
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Daily heartbeat: commits a tiny CSV to this repo, then force-syncs the repo
# to the Hugging Face Space so the Space rebuilds and never goes to sleep.
name: Keep HF Space Alive

on:
  schedule:
    - cron: '0 5 * * *' # Daily at 05:00 UTC
  workflow_dispatch: # Allow manual trigger

# The commit-and-push step writes back to this repository with the default
# GITHUB_TOKEN. Without an explicit grant, that token can be read-only
# (depending on repo/org settings) and the push fails with a 403.
permissions:
  contents: write

jobs:
  heartbeat:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - name: Generate heartbeat CSV
        run: |
          APP_LIVE_DATE="2026-02-11"
          TODAY=$(date -u +%Y-%m-%d)
          DAYS=$(( ( $(date -u -d "$TODAY" +%s) - $(date -u -d "$APP_LIVE_DATE" +%s) ) / 86400 ))
          echo "date,days_since_app_went_live" > heartbeat.csv
          echo "${TODAY},${DAYS}" >> heartbeat.csv
          cat heartbeat.csv

      - name: Commit and push to GitHub
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git add heartbeat.csv
          # Only commit when the file actually changed (re-runs on the same day
          # would otherwise fail with "nothing to commit").
          git diff --staged --quiet || git commit -m "Daily heartbeat: $(date -u +%Y-%m-%d)"
          git push

      - name: Sync to HF Spaces
        env:
          HF_TOKEN: ${{ secrets.HF_TOKEN }}
        run: |
          # NOTE(review): the token is embedded in the remote URL. Actions masks
          # secrets in logs, but the URL is visible in the runner's process
          # list. Acceptable on an ephemeral runner; a credential helper or
          # `git -c http.extraHeader=...` would be stricter.
          git push "https://fmegahed:${HF_TOKEN}@huggingface.co/spaces/fmegahed/timeseries_visualization" master:main --force
|
.gitignore
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.env
|
| 2 |
+
.claude/
|
| 3 |
+
__pycache__/
|
| 4 |
+
requirements.md
|
| 5 |
+
*.Rproj
|
| 6 |
+
.Rproj.user/
|
Dockerfile
CHANGED
|
@@ -21,4 +21,6 @@ EXPOSE 7860
|
|
| 21 |
CMD ["streamlit", "run", "app.py", \
|
| 22 |
"--server.port=7860", \
|
| 23 |
"--server.address=0.0.0.0", \
|
|
|
|
|
|
|
| 24 |
"--browser.gatherUsageStats=false"]
|
|
|
|
| 21 |
# NOTE(review): XSRF protection and CORS checks are disabled below —
# presumably because the app runs behind the Hugging Face Spaces reverse
# proxy, where the rewritten origin makes Streamlit's checks reject uploads.
# Confirm that assumption, and do NOT reuse these flags for a deployment
# exposed directly to the internet.
CMD ["streamlit", "run", "app.py", \
     "--server.port=7860", \
     "--server.address=0.0.0.0", \
     "--server.enableXsrfProtection=false", \
     "--server.enableCORS=false", \
     "--browser.gatherUsageStats=false"]
|
app.py
CHANGED
|
@@ -109,6 +109,29 @@ def _load_demo(path: Path) -> pd.DataFrame:
|
|
| 109 |
return pd.read_csv(path)
|
| 110 |
|
| 111 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 112 |
def _render_cleaning_report(report: CleaningReport) -> None:
|
| 113 |
"""Show a data-quality card."""
|
| 114 |
c1, c2, c3 = st.columns(3)
|
|
@@ -194,6 +217,7 @@ style_dict = get_miami_mpl_style()
|
|
| 194 |
for key in [
|
| 195 |
"raw_df", "cleaned_df", "cleaning_report", "freq_info",
|
| 196 |
"date_col", "y_cols", "qc", "qc_hash",
|
|
|
|
| 197 |
]:
|
| 198 |
if key not in st.session_state:
|
| 199 |
st.session_state[key] = None
|
|
@@ -218,18 +242,23 @@ with st.sidebar:
|
|
| 218 |
st.divider()
|
| 219 |
st.header("Data Input")
|
| 220 |
|
| 221 |
-
uploaded = st.file_uploader("Upload a CSV file", type=["csv", "tsv", "txt"])
|
| 222 |
|
| 223 |
demo_choice = st.selectbox(
|
| 224 |
"Or load a demo dataset",
|
| 225 |
["(none)"] + list(_DEMO_FILES.keys()),
|
|
|
|
| 226 |
)
|
| 227 |
|
| 228 |
# Load data
|
| 229 |
if uploaded is not None:
|
| 230 |
-
|
| 231 |
-
st.
|
| 232 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 233 |
elif demo_choice != "(none)":
|
| 234 |
st.session_state.raw_df = _load_demo(_DEMO_FILES[demo_choice])
|
| 235 |
# else: keep whatever was already in session state
|
|
@@ -247,7 +276,7 @@ with st.sidebar:
|
|
| 247 |
all_cols = list(raw_df.columns)
|
| 248 |
default_date_idx = all_cols.index(date_suggestions[0]) if date_suggestions else 0
|
| 249 |
|
| 250 |
-
date_col = st.selectbox("Date column", all_cols, index=default_date_idx)
|
| 251 |
|
| 252 |
remaining = [c for c in all_cols if c != date_col]
|
| 253 |
default_y = [c for c in numeric_suggestions if c != date_col]
|
|
@@ -255,6 +284,7 @@ with st.sidebar:
|
|
| 255 |
"Value column(s)",
|
| 256 |
remaining,
|
| 257 |
default=default_y[:4] if default_y else [],
|
|
|
|
| 258 |
)
|
| 259 |
|
| 260 |
st.session_state.date_col = date_col
|
|
@@ -265,22 +295,20 @@ with st.sidebar:
|
|
| 265 |
dup_action = st.selectbox(
|
| 266 |
"Duplicate dates",
|
| 267 |
["keep_last", "keep_first", "drop_all"],
|
|
|
|
| 268 |
)
|
| 269 |
missing_action = st.selectbox(
|
| 270 |
"Missing values",
|
| 271 |
["interpolate", "ffill", "drop"],
|
|
|
|
| 272 |
)
|
| 273 |
|
| 274 |
# Clean
|
| 275 |
if y_cols:
|
| 276 |
-
cleaned_df, report =
|
| 277 |
-
raw_df, date_col, y_cols,
|
| 278 |
-
dup_action
|
| 279 |
-
missing_action=missing_action,
|
| 280 |
)
|
| 281 |
-
freq_info = detect_frequency(cleaned_df, date_col)
|
| 282 |
-
cleaned_df = add_time_features(cleaned_df, date_col)
|
| 283 |
-
|
| 284 |
st.session_state.cleaned_df = cleaned_df
|
| 285 |
st.session_state.cleaning_report = report
|
| 286 |
st.session_state.freq_info = freq_info
|
|
@@ -293,6 +321,7 @@ with st.sidebar:
|
|
| 293 |
"Override frequency label (optional)",
|
| 294 |
value="",
|
| 295 |
help="e.g. Daily, Weekly, Monthly, Quarterly, Yearly",
|
|
|
|
| 296 |
)
|
| 297 |
if freq_override.strip():
|
| 298 |
st.session_state.freq_info = FrequencyInfo(
|
|
@@ -303,19 +332,10 @@ with st.sidebar:
|
|
| 303 |
|
| 304 |
# ------ QueryChat ------
|
| 305 |
if check_querychat_available():
|
| 306 |
-
current_hash = _df_hash(cleaned_df) + str(y_cols)
|
| 307 |
-
if st.session_state.qc_hash != current_hash:
|
| 308 |
-
st.session_state.qc = create_querychat(
|
| 309 |
-
cleaned_df,
|
| 310 |
-
name="uploaded data",
|
| 311 |
-
date_col=date_col,
|
| 312 |
-
y_cols=y_cols,
|
| 313 |
-
freq_label=st.session_state.freq_info.label,
|
| 314 |
-
)
|
| 315 |
-
st.session_state.qc_hash = current_hash
|
| 316 |
st.divider()
|
| 317 |
st.subheader("QueryChat")
|
| 318 |
-
|
|
|
|
| 319 |
else:
|
| 320 |
st.divider()
|
| 321 |
st.info(
|
|
@@ -345,7 +365,9 @@ with st.sidebar:
|
|
| 345 |
)
|
| 346 |
st.caption(
|
| 347 |
"**Privacy:** All processing is in-memory. "
|
| 348 |
-
"
|
|
|
|
|
|
|
| 349 |
)
|
| 350 |
|
| 351 |
# ---------------------------------------------------------------------------
|
|
@@ -429,7 +451,26 @@ with tab_single:
|
|
| 429 |
n_colors = max(12, len(y_cols))
|
| 430 |
palette_colors = get_palette_colors(palette_name, n_colors)
|
| 431 |
swatch_fig = render_palette_preview(palette_colors[:8])
|
| 432 |
-
st.pyplot(swatch_fig
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 433 |
|
| 434 |
# Chart-specific controls
|
| 435 |
period_label = "month"
|
|
@@ -460,15 +501,7 @@ with tab_single:
|
|
| 460 |
style_dict=style_dict, palette_colors=palette_colors,
|
| 461 |
)
|
| 462 |
|
| 463 |
-
elif chart_type == "Line – Colored Markers":
|
| 464 |
-
if "month" in df_plot.columns:
|
| 465 |
-
color_by = st.selectbox(
|
| 466 |
-
"Color by",
|
| 467 |
-
["month", "quarter", "year", "day_of_week"],
|
| 468 |
-
key="color_by_a",
|
| 469 |
-
)
|
| 470 |
-
else:
|
| 471 |
-
color_by = st.selectbox("Color by", [c for c in df_plot.columns if c not in (date_col, active_y)][:5], key="color_by_a")
|
| 472 |
fig = plot_line_colored_markers(
|
| 473 |
df_plot, date_col, active_y,
|
| 474 |
color_by=color_by, palette_colors=palette_colors,
|
|
@@ -556,7 +589,7 @@ with tab_single:
|
|
| 556 |
st.error(f"Chart error: {exc}")
|
| 557 |
|
| 558 |
if fig is not None:
|
| 559 |
-
st.pyplot(fig
|
| 560 |
|
| 561 |
# ---- Summary stats expander -------------------------------------------
|
| 562 |
with st.expander("Summary Statistics", expanded=False):
|
|
@@ -566,8 +599,8 @@ with tab_single:
|
|
| 566 |
# ---- AI Interpretation ------------------------------------------------
|
| 567 |
with st.expander("AI Chart Interpretation", expanded=False):
|
| 568 |
st.caption(
|
| 569 |
-
"The chart image (PNG)
|
| 570 |
-
"
|
| 571 |
)
|
| 572 |
if not check_api_key_available():
|
| 573 |
st.warning("Set `OPENAI_API_KEY` to enable AI interpretation.")
|
|
@@ -625,7 +658,7 @@ with tab_few:
|
|
| 625 |
style_dict=style_dict,
|
| 626 |
palette_colors=palette_b,
|
| 627 |
)
|
| 628 |
-
st.pyplot(fig_panel
|
| 629 |
except Exception as exc:
|
| 630 |
st.error(f"Panel chart error: {exc}")
|
| 631 |
|
|
@@ -693,6 +726,6 @@ with tab_many:
|
|
| 693 |
style_dict=style_dict,
|
| 694 |
palette_colors=palette_c,
|
| 695 |
)
|
| 696 |
-
st.pyplot(fig_spag
|
| 697 |
except Exception as exc:
|
| 698 |
st.error(f"Spaghetti chart error: {exc}")
|
|
|
|
| 109 |
return pd.read_csv(path)
|
| 110 |
|
| 111 |
|
| 112 |
+
@st.cache_data(show_spinner=False)
def _clean_pipeline(raw_hash, _raw_df, date_col, y_cols, dup_action, missing_action):
    """Clean the raw frame, detect its frequency, and add time features.

    Cached with ``st.cache_data`` so the pipeline only reruns when an input
    changes. In Streamlit, a leading-underscore parameter is EXCLUDED from
    the cache key — so the DataFrame is the underscored one and the cheap
    precomputed ``raw_hash`` stands in for it. (The original had this
    backwards: ``_raw_hash`` was ignored and ``raw_df`` was re-hashed in
    full on every rerun, defeating the point of passing a hash at all.)
    The call site passes both arguments positionally, so it is unaffected.

    Parameters
    ----------
    raw_hash : str
        Content hash of ``_raw_df``; acts as the frame's cache key.
    _raw_df : pd.DataFrame
        Raw uploaded/demo data (not hashed by Streamlit — see above).
    date_col : str
        Name of the datetime column.
    y_cols : tuple
        Value columns; a tuple (not a list) so the argument is hashable.
    dup_action, missing_action : str
        Strategies forwarded to ``clean_dataframe``.

    Returns
    -------
    tuple
        ``(cleaned_df, cleaning_report, freq_info)``.
    """
    cleaned, report = clean_dataframe(
        _raw_df, date_col, list(y_cols),
        dup_action=dup_action,
        missing_action=missing_action,
    )
    freq = detect_frequency(cleaned, date_col)
    cleaned = add_time_features(cleaned, date_col)
    return cleaned, report, freq
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
@st.fragment
def _querychat_fragment(cleaned_df, date_col, y_cols, freq_label):
    """Render the QueryChat UI inside a Streamlit fragment.

    Running inside ``@st.fragment`` keeps chat interactions from triggering
    a full-page rerun. The querychat object is rebuilt only when the data
    or the selected value columns actually change, tracked by a content
    fingerprint kept in session state (``qc_hash``).
    """
    fingerprint = _df_hash(cleaned_df) + str(y_cols)
    if st.session_state.qc_hash != fingerprint:
        st.session_state.qc = create_querychat(
            cleaned_df,
            name="uploaded_data",
            date_col=date_col,
            y_cols=y_cols,
            freq_label=freq_label,
        )
        st.session_state.qc_hash = fingerprint
    st.session_state.qc.ui()
|
| 133 |
+
|
| 134 |
+
|
| 135 |
def _render_cleaning_report(report: CleaningReport) -> None:
|
| 136 |
"""Show a data-quality card."""
|
| 137 |
c1, c2, c3 = st.columns(3)
|
|
|
|
| 217 |
for key in [
|
| 218 |
"raw_df", "cleaned_df", "cleaning_report", "freq_info",
|
| 219 |
"date_col", "y_cols", "qc", "qc_hash",
|
| 220 |
+
"_upload_id", "_upload_delim", "cleaned_df_hash",
|
| 221 |
]:
|
| 222 |
if key not in st.session_state:
|
| 223 |
st.session_state[key] = None
|
|
|
|
| 242 |
st.divider()
|
| 243 |
st.header("Data Input")
|
| 244 |
|
| 245 |
+
uploaded = st.file_uploader("Upload a CSV file", type=["csv", "tsv", "txt"], key="csv_upload")
|
| 246 |
|
| 247 |
demo_choice = st.selectbox(
|
| 248 |
"Or load a demo dataset",
|
| 249 |
["(none)"] + list(_DEMO_FILES.keys()),
|
| 250 |
+
key="demo_select",
|
| 251 |
)
|
| 252 |
|
| 253 |
# Load data
|
| 254 |
if uploaded is not None:
|
| 255 |
+
file_id = (uploaded.name, uploaded.size)
|
| 256 |
+
if st.session_state.get("_upload_id") != file_id:
|
| 257 |
+
df_raw, delim = read_csv_upload(uploaded)
|
| 258 |
+
st.session_state.raw_df = df_raw
|
| 259 |
+
st.session_state._upload_delim = delim
|
| 260 |
+
st.session_state._upload_id = file_id
|
| 261 |
+
st.caption(f"Detected delimiter: `{repr(st.session_state._upload_delim)}`")
|
| 262 |
elif demo_choice != "(none)":
|
| 263 |
st.session_state.raw_df = _load_demo(_DEMO_FILES[demo_choice])
|
| 264 |
# else: keep whatever was already in session state
|
|
|
|
| 276 |
all_cols = list(raw_df.columns)
|
| 277 |
default_date_idx = all_cols.index(date_suggestions[0]) if date_suggestions else 0
|
| 278 |
|
| 279 |
+
date_col = st.selectbox("Date column", all_cols, index=default_date_idx, key="sidebar_date_col")
|
| 280 |
|
| 281 |
remaining = [c for c in all_cols if c != date_col]
|
| 282 |
default_y = [c for c in numeric_suggestions if c != date_col]
|
|
|
|
| 284 |
"Value column(s)",
|
| 285 |
remaining,
|
| 286 |
default=default_y[:4] if default_y else [],
|
| 287 |
+
key="sidebar_y_cols",
|
| 288 |
)
|
| 289 |
|
| 290 |
st.session_state.date_col = date_col
|
|
|
|
| 295 |
dup_action = st.selectbox(
|
| 296 |
"Duplicate dates",
|
| 297 |
["keep_last", "keep_first", "drop_all"],
|
| 298 |
+
key="sidebar_dup_action",
|
| 299 |
)
|
| 300 |
missing_action = st.selectbox(
|
| 301 |
"Missing values",
|
| 302 |
["interpolate", "ffill", "drop"],
|
| 303 |
+
key="sidebar_missing_action",
|
| 304 |
)
|
| 305 |
|
| 306 |
# Clean
|
| 307 |
if y_cols:
|
| 308 |
+
cleaned_df, report, freq_info = _clean_pipeline(
|
| 309 |
+
_df_hash(raw_df), raw_df, date_col, tuple(y_cols),
|
| 310 |
+
dup_action, missing_action,
|
|
|
|
| 311 |
)
|
|
|
|
|
|
|
|
|
|
| 312 |
st.session_state.cleaned_df = cleaned_df
|
| 313 |
st.session_state.cleaning_report = report
|
| 314 |
st.session_state.freq_info = freq_info
|
|
|
|
| 321 |
"Override frequency label (optional)",
|
| 322 |
value="",
|
| 323 |
help="e.g. Daily, Weekly, Monthly, Quarterly, Yearly",
|
| 324 |
+
key="sidebar_freq_override",
|
| 325 |
)
|
| 326 |
if freq_override.strip():
|
| 327 |
st.session_state.freq_info = FrequencyInfo(
|
|
|
|
| 332 |
|
| 333 |
# ------ QueryChat ------
|
| 334 |
if check_querychat_available():
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 335 |
st.divider()
|
| 336 |
st.subheader("QueryChat")
|
| 337 |
+
_querychat_fragment(cleaned_df, date_col, y_cols,
|
| 338 |
+
st.session_state.freq_info.label)
|
| 339 |
else:
|
| 340 |
st.divider()
|
| 341 |
st.info(
|
|
|
|
| 365 |
)
|
| 366 |
st.caption(
|
| 367 |
"**Privacy:** All processing is in-memory. "
|
| 368 |
+
"If you click **Interpret Chart with AI**, the chart image is sent to OpenAI — "
|
| 369 |
+
"do not include sensitive data in your charts. "
|
| 370 |
+
"QueryChat protects your privacy by only passing metadata (not your data) to OpenAI."
|
| 371 |
)
|
| 372 |
|
| 373 |
# ---------------------------------------------------------------------------
|
|
|
|
| 451 |
n_colors = max(12, len(y_cols))
|
| 452 |
palette_colors = get_palette_colors(palette_name, n_colors)
|
| 453 |
swatch_fig = render_palette_preview(palette_colors[:8])
|
| 454 |
+
st.pyplot(swatch_fig)
|
| 455 |
+
|
| 456 |
+
# Color-by control (for colored markers chart)
|
| 457 |
+
color_by = None
|
| 458 |
+
if chart_type == "Line – Colored Markers":
|
| 459 |
+
if "month" in working_df.columns:
|
| 460 |
+
color_by = st.selectbox(
|
| 461 |
+
"Color by",
|
| 462 |
+
["month", "quarter", "year", "day_of_week"],
|
| 463 |
+
key="color_by_a",
|
| 464 |
+
)
|
| 465 |
+
else:
|
| 466 |
+
other_cols = [
|
| 467 |
+
c for c in working_df.columns
|
| 468 |
+
if c not in (date_col, active_y)
|
| 469 |
+
][:5]
|
| 470 |
+
if other_cols:
|
| 471 |
+
color_by = st.selectbox(
|
| 472 |
+
"Color by", other_cols, key="color_by_a",
|
| 473 |
+
)
|
| 474 |
|
| 475 |
# Chart-specific controls
|
| 476 |
period_label = "month"
|
|
|
|
| 501 |
style_dict=style_dict, palette_colors=palette_colors,
|
| 502 |
)
|
| 503 |
|
| 504 |
+
elif chart_type == "Line – Colored Markers" and color_by is not None:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 505 |
fig = plot_line_colored_markers(
|
| 506 |
df_plot, date_col, active_y,
|
| 507 |
color_by=color_by, palette_colors=palette_colors,
|
|
|
|
| 589 |
st.error(f"Chart error: {exc}")
|
| 590 |
|
| 591 |
if fig is not None:
|
| 592 |
+
st.pyplot(fig)
|
| 593 |
|
| 594 |
# ---- Summary stats expander -------------------------------------------
|
| 595 |
with st.expander("Summary Statistics", expanded=False):
|
|
|
|
| 599 |
# ---- AI Interpretation ------------------------------------------------
|
| 600 |
with st.expander("AI Chart Interpretation", expanded=False):
|
| 601 |
st.caption(
|
| 602 |
+
"The chart image (PNG) is sent to OpenAI for interpretation. "
|
| 603 |
+
"Do not include sensitive data in your charts."
|
| 604 |
)
|
| 605 |
if not check_api_key_available():
|
| 606 |
st.warning("Set `OPENAI_API_KEY` to enable AI interpretation.")
|
|
|
|
| 658 |
style_dict=style_dict,
|
| 659 |
palette_colors=palette_b,
|
| 660 |
)
|
| 661 |
+
st.pyplot(fig_panel)
|
| 662 |
except Exception as exc:
|
| 663 |
st.error(f"Panel chart error: {exc}")
|
| 664 |
|
|
|
|
| 726 |
style_dict=style_dict,
|
| 727 |
palette_colors=palette_c,
|
| 728 |
)
|
| 729 |
+
st.pyplot(fig_spag)
|
| 730 |
except Exception as exc:
|
| 731 |
st.error(f"Spaghetti chart error: {exc}")
|
src/ui_theme.py
CHANGED
|
@@ -72,9 +72,8 @@ def apply_miami_theme() -> None:
|
|
| 72 |
box-shadow: none;
|
| 73 |
}}
|
| 74 |
|
| 75 |
-
/* ----
|
| 76 |
-
div[data-testid="stExpander"]
|
| 77 |
-
div[data-testid="stHorizontalBlock"] > div {{
|
| 78 |
border: 1px solid {_BORDER_GRAY};
|
| 79 |
border-radius: 8px;
|
| 80 |
padding: 0.75rem;
|
|
|
|
| 72 |
box-shadow: none;
|
| 73 |
}}
|
| 74 |
|
| 75 |
+
/* ---- Expander card borders ---- */
|
| 76 |
+
div[data-testid="stExpander"] {{
|
|
|
|
| 77 |
border: 1px solid {_BORDER_GRAY};
|
| 78 |
border-radius: 8px;
|
| 79 |
padding: 0.75rem;
|