# dwd / server.R
# refactor: remove inline selected station styling from main layer and add
# post-render re-highlight. (commit 6362620, author: alexdum)
# server.R for DWD App
# Helper function for label generation
generate_station_label <- function(name, id, state, start_date, end_date, detailed_summary,
                                   overall_start = NULL, overall_end = NULL, resolution = NULL) {
  # Build the HTML tooltip shown for a station on the map.
  # Any failure falls back to a minimal "<b>name</b> (id)" label.
  tryCatch(
    {
      # Render one date value as "YYYY-MM-DD" or a sentinel word.
      # DWD sentinel codes: "99999999" = open-ended (shown as "Present" for
      # end dates), "00000000" = missing.
      render_date <- function(value, is_end = FALSE) {
        if (is.null(value) || is.na(value) || value == "") {
          return("Unknown")
        }
        if (inherits(value, "Date")) {
          return(format(value, "%Y-%m-%d"))
        }
        value <- as.character(value)
        if (value == "99999999") {
          return(if (is_end) "Present" else "Unknown")
        }
        if (value == "00000000") {
          return("Unknown")
        }
        # Accept either ISO ("YYYY-MM-DD") or compact ("YYYYMMDD") input.
        parsed <- if (grepl("^\\d{4}-\\d{2}-\\d{2}$", value)) {
          as.Date(value)
        } else {
          as.Date(value, format = "%Y%m%d")
        }
        if (is.na(parsed)) {
          return("Unknown")
        }
        format(parsed, "%Y-%m-%d")
      }
      render_range <- function(from, to) {
        paste0(render_date(from, FALSE), " to ", render_date(to, TRUE))
      }
      # Capitalized resolution name, or generic "Data" when unknown.
      has_res <- !(is.null(resolution) || is.na(resolution) || resolution == "")
      res_label <- if (has_res) {
        paste0(toupper(substr(resolution, 1, 1)), substr(resolution, 2, nchar(resolution)))
      } else {
        "Data"
      }
      station_range <- render_range(start_date, end_date)
      # Coverage falls back to the station's own dates when no overall
      # range was supplied.
      coverage_range <- render_range(
        if (is.null(overall_start)) start_date else overall_start,
        if (is.null(overall_end)) end_date else overall_end
      )
      summary_text <- if (is.null(detailed_summary) || is.na(detailed_summary) || detailed_summary == "") {
        "No Data"
      } else {
        detailed_summary
      }
      # Comma-separated summary becomes an HTML bullet list.
      bullet_list <- gsub(", ", "<br>• ", htmltools::htmlEscape(summary_text))
      note <- paste0(
        "<br><small><i>Note: 'Present' indicates ongoing monitoring;<br>",
        "some parameters may have<br>",
        "upload delays.</i></small>"
      )
      paste0(
        "<div style='font-size:18px; max-width: 400px;'>",
        "<b>", htmltools::htmlEscape(name), "</b> (", id, ")<br>",
        "<span style='font-size:80%;'>State: ", htmltools::htmlEscape(state), "</span><br>",
        "<span style='font-size:80%; color:#555;'>",
        "Station Active: ", station_range,
        "</span><br>",
        "<span style='font-size:80%; color:#555;'>",
        res_label, " Coverage: ", coverage_range,
        "</span><br>",
        "<div style='font-size:80%; color:#333; font-weight:bold; margin-top:5px;'>",
        "Data Availability (", res_label, "):</div>",
        "<div style='font-size:75%; color:#333; line-height: 1.1;'>• ",
        bullet_list, note,
        "</div>",
        "</div>"
      )
    },
    error = function(e) {
      paste0("<b>", htmltools::htmlEscape(name), "</b> (", id, ")")
    }
  )
}
server <- function(input, output, session) {
  # --- Reactive Values ---
  current_station_id <- reactiveVal(NULL) # Currently selected station id (NULL = none)
  station_data <- reactiveVal(NULL) # The loaded data
  loading_status <- reactiveVal(FALSE) # TRUE while a fetch/parse cycle is running
  loading_diagnostics <- reactiveVal("") # Human-readable progress / error text
  previous_station_choices <- reactiveVal(NULL) # Track previous choices to avoid blink
  previous_date_range <- reactiveVal(NULL) # Track previous date range for bi-directional sync
  url_initialized <- reactiveVal(FALSE) # TRUE once startup URL params have been applied
  # MapLibre Reactive Values
  style_change_trigger <- reactiveVal(0) # Trigger station re-render after style change
  map_initialized <- reactiveVal(FALSE) # Track if map bounds have been set
  stations_before_id <- reactiveVal(NULL) # Layer ID for station ordering
  current_raster_layers <- reactiveVal(character(0)) # Track dynamically added raster layers
  # Map bounds for Germany (WGS84 lon/lat box, used for the initial fit)
  map_bounds <- list(
    lng_min = 5.5,
    lat_min = 47.0,
    lng_max = 15.5,
    lat_max = 55.5
  )
# --- URL Parameter Parsing ---
parse_url_params <- function(query) {
params <- list()
if (length(query) == 0) {
return(params)
}
# Helper to decode
decode <- function(x) URLdecode(x)
# Parse query string
pairs <- strsplit(query, "&")[[1]]
for (pair in pairs) {
parts <- strsplit(pair, "=")[[1]]
if (length(parts) == 2) {
key <- parts[1]
val <- decode(parts[2])
params[[key]] <- val
}
}
params
}
  # Observer: Apply URL params on app startup (runs until url_initialized()
  # flips to TRUE, then never again).
  observe({
    req(!url_initialized())
    query <- session$clientData$url_search
    # session$clientData$url_search usually starts with "?"; strip it.
    # We parse even an empty query so the no-params case still marks
    # initialization as done.
    q_str <- sub("^\\?", "", query)
    params <- parse_url_params(q_str)
    # If params exist, apply them
    if (length(params) > 0) {
      # 1. Resolution
      if (!is.null(params$resolution)) {
        updateRadioButtons(session, "data_resolution", selected = params$resolution)
      }
      # 2. Date Range
      if (!is.null(params$start) && !is.null(params$end)) {
        updateDateRangeInput(session, "date_range", start = params$start, end = params$end)
      }
      # 3. State/Land Selection
      if (!is.null(params$landname)) {
        updateSelectizeInput(session, "state_selector", selected = params$landname)
      }
      # 4. Station Selection
      if (!is.null(params$station)) {
        station_ref <- params$station
        # Station choices are loaded asynchronously (they depend on the
        # resolution), so wait 500 ms before resolving the reference.
        # The param may be either a station ID or a station name; the
        # selector's values are IDs, so a name must be looked up first.
        shinyjs::delay(500, {
          current_st <- isolate(all_stations())
          if (!is.null(current_st)) {
            # Try match ID
            match <- current_st %>% filter(id == station_ref)
            if (nrow(match) == 0) {
              # Try match Name
              match <- current_st %>% filter(name == station_ref)
            }
            if (nrow(match) > 0) {
              target_id <- match$id[1]
              updateSelectizeInput(session, "station_selector", selected = target_id)
              # The station_selector observer will then drive the map /
              # details panel updates.
            }
          }
        })
      }
      # 5. View/Tab (delayed so the other updates settle first)
      if (!is.null(params$view)) {
        view <- params$view
        shinyjs::delay(800, {
          if (view == "map") {
            nav_select("main_nav", "Map View") # bslib page_navbar with id="main_nav"
          } else if (view == "station-info") {
            updateNavbarPage(session, "main_nav", selected = "Stations Info")
          } else if (grepl("dashboard", view)) {
            updateNavbarPage(session, "main_nav", selected = "Dashboard")
            if (view == "dashboard-data") {
              shinyjs::delay(200, {
                # navset_card_pill id="dashboard_subtabs"
                nav_select("dashboard_subtabs", "Data")
              })
            }
          }
        })
      }
    }
    url_initialized(TRUE)
  })
# --- Reactive Data Sources (Resolution Dependent) ---
observeEvent(input$data_resolution,
{
session$sendCustomMessage("freezeUI", list(text = "Switching resolution..."))
# Update date range based on resolution
res <- tolower(input$data_resolution)
tryCatch(
{
new_end <- Sys.Date()
if (res %in% c("monthly", "daily")) {
# For Monthly/Daily: default to last 6 years
new_start <- new_end - (365 * 6)
} else {
# For Hourly: default to last 1 year (366 days)
new_start <- new_end - 366
}
updateDateRangeInput(session, "date_range", start = new_start, end = new_end)
},
error = function(e) {
message("Note: Could not update date range: ", e$message)
}
)
},
priority = 1000,
ignoreInit = TRUE
)
  # File index for the active resolution; used only to locate downloadable
  # data files (not for station metadata).
  current_index <- reactive({
    req(input$data_resolution)
    # Identify resolution (default hourly)
    res <- tolower(input$data_resolution)
    # Load File Index via project helper (defined elsewhere)
    load_dwd_index(res)
  })
  # Enriched station metadata (coords, state, coverage summary) for the
  # active resolution, read from a pre-generated cache.
  all_stations <- reactive({
    req(input$data_resolution)
    res <- tolower(input$data_resolution)
    # Load enriched stations from pre-generated cache (project helper)
    load_enriched_stations(res)
  })
# Observe and Update State Selector
observe({
st <- all_stations()
req(st)
# Get unique states
states <- sort(unique(st$state))
# Update choice - preserve selection if possible (handled by Shiny usually)
# We use server=TRUE for performance if list is long (states is short but good practice)
updateSelectizeInput(session, "state_selector", choices = states, server = TRUE)
})
# --- Freeze UI During Data Loading ---
# Disable all interactive controls (except map hovering) while parsing data
observe({
is_loading <- loading_status()
# List of input IDs to disable/enable
inputs_to_toggle <- c(
"state_selector",
"station_selector",
"date_range",
"zoom_home",
"main_nav",
"download_hourly"
)
if (is_loading) {
for (inp in inputs_to_toggle) {
shinyjs::disable(inp)
}
} else {
for (inp in inputs_to_toggle) {
shinyjs::enable(inp)
}
}
})
# Filtered stations based on Sidebar inputs
filtered_stations <- reactive({
req(all_stations(), input$date_range)
df <- all_stations()
# Filter by Date (Strict File Availability)
# We check if the station has specific data files overlapping the range
range_start <- as.numeric(format(input$date_range[1], "%Y%m%d"))
range_end <- as.numeric(format(input$date_range[2], "%Y%m%d"))
# Filter by State
if (!is.null(input$state_selector) && length(input$state_selector) > 0) {
df <- df %>% filter(state %in% input$state_selector)
}
df <- df %>% filter(
as.numeric(station_overall_start) <= range_end &
as.numeric(station_overall_end) >= range_start
)
# Defensive: atomic data.frame to prevent tibble/rowwise issues
as.data.frame(df)
})
  # Initialize / Update Station Selector Choices
  # Rebuilds the dropdown only when the set of station IDs actually changes,
  # to avoid a visible "blink" of the selectize widget on unrelated ticks.
  observe({
    df <- filtered_stations()
    req(df)
    # Create choices: "Station Name (ID)" = "ID"
    # Defensive: Ensure atomic vectors
    ids <- as.character(df$id)
    names <- paste0(as.character(df$name), " (", ids, ")")
    if (length(ids) > 0) {
      new_choices <- setNames(ids, names)
    } else {
      new_choices <- character(0)
    }
    # Only update if choices have actually changed (compare sorted IDs,
    # ignoring display names)
    prev_choices <- previous_station_choices()
    new_ids <- sort(unname(new_choices))
    prev_ids <- if (!is.null(prev_choices)) sort(unname(prev_choices)) else NULL
    if (is.null(prev_ids) || !identical(new_ids, prev_ids)) {
      # Preserve selection if still in filtered list
      current_sel <- input$station_selector
      updateSelectizeInput(session, "station_selector",
        choices = new_choices,
        selected = current_sel,
        server = TRUE
      )
      previous_station_choices(new_choices)
    }
  })
  # Helper: Broadcast current state (station, resolution, view, dates) to
  # the parent page via the "updateParentURL" custom message, so the
  # embedding page can keep its URL in sync with the app.
  broadcast_state <- function(view_override = NULL) {
    # Resolve metadata for the active station (if any)
    sid <- current_station_id()
    st_meta <- NULL
    if (!is.null(sid)) {
      all <- isolate(all_stations()) # isolate: callers are observers; avoid extra dependency
      if (!is.null(all)) {
        st_meta <- all %>%
          filter(id == sid) %>%
          head(1)
      }
    }
    station_id <- if (!is.null(st_meta) && nrow(st_meta) > 0) as.character(st_meta$id) else NULL
    station_name <- if (!is.null(st_meta) && nrow(st_meta) > 0) as.character(st_meta$name) else NULL
    landname <- if (!is.null(st_meta) && nrow(st_meta) > 0) as.character(st_meta$state) else NULL
    # Resolution string as shown in the UI
    resolution <- input$data_resolution
    # Map the active navbar tab (or the explicit override) to a view slug
    main_tab <- input$main_nav
    view <- if (!is.null(view_override)) {
      view_override
    } else if (!is.null(main_tab)) {
      if (main_tab == "Map View") {
        "map"
      } else if (main_tab == "Stations Info") {
        "station-info"
      } else if (main_tab == "Dashboard") {
        subtab <- input$dashboard_subtabs
        # If subtab is NULL/loading, default to plots
        if (!is.null(subtab) && subtab == "Data") {
          "dashboard-data"
        } else {
          "dashboard-plots"
        }
      } else {
        "map"
      }
    } else {
      "map"
    }
    start_date <- if (!is.null(input$date_range)) as.character(input$date_range[1]) else NULL
    end_date <- if (!is.null(input$date_range)) as.character(input$date_range[2]) else NULL
    session$sendCustomMessage("updateParentURL", list(
      station = station_id,
      stationName = station_name,
      landname = landname,
      resolution = resolution,
      view = view,
      start = start_date,
      end = end_date
    ))
    # Debug notification
    # showNotification(paste("Broadcasting:", station_name), duration = 3, type = "message")
  }
  # --- Selection Logic via Dropdown ---
  # React to dropdown selection: set the current station, highlight it on
  # the map, and reveal the details panel.
  observeEvent(input$station_selector, {
    req(input$station_selector)
    id_val <- input$station_selector
    # Find station details
    s_meta <- all_stations() %>% filter(id == id_val)
    req(nrow(s_meta) > 0)
    lat_val <- s_meta$latitude[1]
    lng_val <- s_meta$longitude[1]
    # Optimization: If ID is same as current, do not re-trigger date resets
    # or map moves. This prevents circular loops where
    # Date Filter -> Filtered List -> Update Selector -> Trigger Observer -> Reset Date
    prev_id <- current_station_id()
    if (!is.null(prev_id) && prev_id == id_val) {
      return()
    }
    current_station_id(id_val)
    # Build the tooltip label, preferring the overall coverage columns when
    # the enriched cache provides them.
    res <- if (is.null(input$data_resolution)) NULL else tolower(input$data_resolution)
    overall_start <- if ("station_overall_start" %in% names(s_meta)) s_meta$station_overall_start[1] else s_meta$start_date[1]
    overall_end <- if ("station_overall_end" %in% names(s_meta)) s_meta$station_overall_end[1] else s_meta$end_date[1]
    lbl <- generate_station_label(
      s_meta$name[1], s_meta$id[1], s_meta$state[1],
      s_meta$start_date[1], s_meta$end_date[1], s_meta$detailed_summary[1],
      overall_start, overall_end, res
    )
    highlight_selected_station(maplibre_proxy("map"), lng_val, lat_val, lbl)
    # Reveal the details panel (kept alive even when hidden)
    output$show_details_panel <- reactive(TRUE)
    outputOptions(output, "show_details_panel", suspendWhenHidden = FALSE)
    # NOTE: Date reset removed to preserve user context
  })
# Station Count
output$station_count_filtered <- renderText({
n <- nrow(filtered_stations())
paste(n, "stations found")
})
  # --- Map ---
  # Initial map: Carto Positron vector style, centered roughly on Germany.
  output$map <- renderMaplibre({
    maplibre(
      style = "https://basemaps.cartocdn.com/gl/positron-gl-style/style.json",
      center = c(10.45, 51.16),
      zoom = 6
    ) %>%
      add_navigation_control(show_compass = FALSE, visualize_pitch = FALSE, position = "top-left")
  })
  # Initialize map bounds only once; input$map_zoom being available signals
  # that the MapLibre widget has rendered.
  observe({
    req(!map_initialized())
    req(input$map_zoom)
    maplibre_proxy("map") %>%
      fit_bounds(
        c(map_bounds$lng_min, map_bounds$lat_min, map_bounds$lng_max, map_bounds$lat_max)
      )
    map_initialized(TRUE)
  })
# Zoom to extent of all filtered stations
observeEvent(input$zoom_home, {
df <- filtered_stations()
req(df)
if (nrow(df) > 0) {
# Calculate bounds
lons <- range(df$longitude, na.rm = TRUE)
lats <- range(df$latitude, na.rm = TRUE)
# If only one station, zoom to it with a small buffer
if (nrow(df) == 1) {
maplibre_proxy("map") %>%
fly_to(center = c(df$longitude[1], df$latitude[1]), zoom = 12)
} else {
maplibre_proxy("map") %>%
fit_bounds(c(lons[1], lats[1], lons[2], lats[2]), animate = TRUE)
}
}
})
# Zoom to extent when state selection changes
observeEvent(input$state_selector,
{
df <- filtered_stations()
req(df)
if (nrow(df) > 0) {
# Calculate bounds
lons <- range(df$longitude, na.rm = TRUE)
lats <- range(df$latitude, na.rm = TRUE)
if (nrow(df) == 1) {
maplibre_proxy("map") %>%
fly_to(center = c(df$longitude[1], df$latitude[1]), zoom = 12)
} else {
maplibre_proxy("map") %>%
fit_bounds(c(lons[1], lats[1], lons[2], lats[2]), animate = TRUE)
}
}
},
ignoreInit = TRUE
)
  # Fix Map Rendering on Tab Switch: when returning to the map tab, force a
  # MapLibre resize so the canvas fills the (now visible) container.
  observeEvent(input$main_nav, {
    if (input$main_nav == "Map View") {
      # Slight delay to ensure tab is visible - MapLibre handles resize automatically
      shinyjs::runjs("
        setTimeout(function() {
          var map = document.getElementById('map');
          if (map && map.__mapgl) {
            map.__mapgl.resize();
          }
        }, 200);
      ")
    }
  })
  # --- Basemap Switching ---
  # Two strategies:
  #  * Vector (Carto) styles: set_style() directly; station circles are
  #    later inserted below "watername_ocean" so labels stay on top. For
  #    "esri_imagery" a satellite raster layer is additionally slotted under
  #    the vector features after a delay (set_style is asynchronous).
  #  * Pure raster (OSM / Esri Topo): load a blank style, then add the tile
  #    layer once the style swap has settled.
  # Each async step re-checks input$basemap to discard stale callbacks when
  # the user switches again quickly (race-condition guard), and bumps
  # style_change_trigger so the station layer is re-added.
  observeEvent(input$basemap, {
    proxy <- maplibre_proxy("map")
    # Explicitly remove any previously added raster layers
    old_layers <- isolate(current_raster_layers())
    if (length(old_layers) > 0) {
      for (layer_id in old_layers) {
        proxy %>% clear_layer(layer_id)
      }
      current_raster_layers(character(0))
    }
    if (input$basemap %in% c("carto_positron", "carto_voyager", "esri_imagery")) {
      # VECTOR LOGIC (Carto-based styles)
      style_url <- switch(input$basemap,
        "carto_positron" = "https://basemaps.cartocdn.com/gl/positron-gl-style/style.json",
        "carto_voyager" = "https://basemaps.cartocdn.com/gl/voyager-gl-style/style.json",
        "esri_imagery" = "https://basemaps.cartocdn.com/gl/voyager-gl-style/style.json"
      )
      proxy %>%
        set_style(style_url)
      # For vector sandwich, we want stations below labels
      stations_before_id("watername_ocean")
      # For Esri Imagery: Insert satellite raster layer below the vector style's features
      if (input$basemap == "esri_imagery") {
        current_session <- shiny::getDefaultReactiveDomain()
        selected_basemap <- input$basemap
        later::later(function() {
          shiny::withReactiveDomain(current_session, {
            # Race condition check: user may have switched basemap again
            current_basemap <- isolate(input$basemap)
            if (current_basemap != selected_basemap) {
              return()
            }
            # Unique ids so repeated switches never collide with stale layers
            unique_suffix <- as.numeric(Sys.time()) * 1000
            source_id <- paste0("esri_imagery_source_", unique_suffix)
            layer_id <- paste0("esri_imagery_layer_", unique_suffix)
            esri_url <- "https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}"
            maplibre_proxy("map") %>%
              add_raster_source(id = source_id, tiles = c(esri_url), tileSize = 256) %>%
              add_layer(
                id = layer_id,
                type = "raster",
                source = source_id,
                paint = list("raster-opacity" = 1),
                before_id = "watername_ocean"
              )
            current_raster_layers(c(layer_id))
            style_change_trigger(isolate(style_change_trigger()) + 1)
          })
        }, delay = 0.5)
      } else {
        style_change_trigger(isolate(style_change_trigger()) + 1)
      }
    } else {
      # RASTER LOGIC (Esri Topo, OSM)
      tile_url <- if (input$basemap %in% c("osm", "osm_gray")) {
        "https://tile.openstreetmap.org/{z}/{x}/{y}.png"
      } else {
        "https://server.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer/tile/{z}/{y}/{x}"
      }
      attribution_text <- if (input$basemap %in% c("osm", "osm_gray")) {
        '&copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors'
      } else {
        "Tiles &copy; Esri"
      }
      paint_props <- list("raster-opacity" = 1)
      if (input$basemap == "osm_gray") {
        # Desaturate + boost contrast to fake a grayscale OSM style
        paint_props[["raster-saturation"]] <- -0.9
        paint_props[["raster-contrast"]] <- 0.3
      }
      # Use blank style + raster layer; timestamp metadata forces MapLibre
      # to treat each blank style as new
      blank_style <- list(
        version = 8,
        sources = list(),
        layers = list(),
        metadata = list(timestamp = as.numeric(Sys.time()))
      )
      json_blank <- jsonlite::toJSON(blank_style, auto_unbox = TRUE)
      blank_uri <- paste0("data:application/json,", URLencode(as.character(json_blank), reserved = TRUE))
      proxy %>%
        set_style(blank_uri)
      current_session <- shiny::getDefaultReactiveDomain()
      selected_basemap <- input$basemap
      later::later(function() {
        shiny::withReactiveDomain(current_session, {
          # Race condition check (see above)
          current_basemap <- isolate(input$basemap)
          if (current_basemap != selected_basemap) {
            return()
          }
          unique_suffix <- as.numeric(Sys.time()) * 1000
          source_id <- paste0("raster_source_", unique_suffix)
          layer_id <- paste0("raster_layer_", unique_suffix)
          maplibre_proxy("map") %>%
            add_raster_source(id = source_id, tiles = c(tile_url), tileSize = 256, attribution = attribution_text) %>%
            add_layer(
              id = layer_id,
              type = "raster",
              source = source_id,
              paint = paint_props
            )
          # No label layer to slot under on a blank style
          stations_before_id(NULL)
          current_raster_layers(c(layer_id))
          style_change_trigger(isolate(style_change_trigger()) + 1)
        })
      }, delay = 0.5)
    }
  })
# Toggle Labels visibility
observeEvent(input$show_labels,
{
visibility <- if (input$show_labels) "visible" else "none"
label_layers <- c(
"place_villages", "place_town", "place_country_2", "place_country_1",
"place_state", "place_continent",
"place_city_r6", "place_city_r5", "place_city_dot_r7", "place_city_dot_r4",
"place_city_dot_r2", "place_city_dot_z7",
"place_capital_dot_z7", "place_capital",
"roadname_minor", "roadname_sec", "roadname_pri", "roadname_major",
"motorway_name",
"watername_ocean", "watername_sea", "watername_lake", "watername_lake_line",
"poi_stadium", "poi_park", "poi_zoo",
"airport_label",
"country-label", "state-label", "settlement-major-label", "settlement-minor-label",
"settlement-subdivision-label", "road-label", "waterway-label", "natural-point-label",
"poi-label", "airport-label"
)
proxy <- maplibre_proxy("map")
for (layer_id in label_layers) {
tryCatch(
{
proxy <- proxy %>% set_layout_property(layer_id, "visibility", visibility)
},
error = function(e) {
# Layer may not exist in current style, ignore silently
}
)
}
},
ignoreInit = TRUE
)
  # Update Markers
  # Optimize: Only update when data actually changes, not on tab switch
  # Also depends on style_change_trigger to re-add layer after style change
  observe({
    df <- filtered_stations()
    req(df, input$data_resolution)
    # Wait for map to be ready before adding markers
    req(map_initialized())
    # Add dependency on style_change_trigger to re-add layer after style change
    style_change_trigger()
    # We do NOT check input$main_nav here. We want markers to persist/update
    # regardless of view, so they are ready when user switches back.
    # This prevents the "reload" (clear + re-add) effect on tab switch.
    res <- tolower(input$data_resolution)
    n_rows <- nrow(df)
    # Coverage columns fall back to station start/end when the enriched
    # cache lacks them
    coverage_start <- if ("station_overall_start" %in% names(df)) df$station_overall_start else df$start_date
    coverage_end <- if ("station_overall_end" %in% names(df)) df$station_overall_end else df$end_date
    # Pre-process label with HTML using Helper (one label per station row)
    df$popup_content <- purrr::pmap_chr(
      list(
        df$name, df$id, df$state, df$start_date, df$end_date,
        df$detailed_summary, coverage_start, coverage_end, rep(res, n_rows)
      ),
      generate_station_label
    )
    if (nrow(df) > 0) {
      # Standard styling for all stations (selection is handled by separate highlight layer)
      df <- df %>%
        mutate(
          circle_color = "navy",
          circle_radius = 6,
          circle_stroke_color = "#00000000",
          circle_stroke_width = 0
        )
      # Convert to sf
      map_data <- st_as_sf(df, coords = c("longitude", "latitude"), crs = 4326)
      # Replace the whole "stations" layer in one pass
      maplibre_proxy("map") %>%
        clear_layer("stations") %>%
        add_circle_layer(
          id = "stations",
          source = map_data,
          circle_color = get_column("circle_color"),
          circle_radius = get_column("circle_radius"),
          circle_stroke_color = get_column("circle_stroke_color"),
          circle_stroke_width = get_column("circle_stroke_width"),
          circle_opacity = 0.7,
          tooltip = get_column("popup_content"),
          before_id = stations_before_id()
        )
      # Re-highlight selected station if present (to update label with new resolution)
      sid <- isolate(current_station_id())
      if (!is.null(sid)) {
        sel_row <- df %>% filter(id == sid)
        if (nrow(sel_row) > 0) {
          highlight_selected_station(maplibre_proxy("map"), sel_row$longitude[1], sel_row$latitude[1], sel_row$popup_content[1], move_map = FALSE)
        } else {
          # Station no longer in filtered set, clear highlight
          maplibre_proxy("map") %>% clear_layer("selected-highlight")
        }
      }
    }
    # Unfreeze after markers are updated
    session$sendCustomMessage("unfreezeUI", list())
  })
  # --- Selection Logic ---
  # Map click: select the clicked station, highlight it, and sync the
  # dropdown (which in turn drives the details panel).
  observeEvent(input$map_feature_click, {
    clicked_data <- input$map_feature_click
    # Check if the click was on the "stations" layer (field name differs
    # between mapgl versions: layer_id vs layer)
    if (!is.null(clicked_data) && (isTRUE(clicked_data$layer_id == "stations") || isTRUE(clicked_data$layer == "stations"))) {
      # The ID is in the feature properties
      id_val <- clicked_data$properties$id
      if (!is.null(id_val)) {
        # Set ID
        current_station_id(id_val)
        # Fetch metadata to rebuild the highlight label
        s_meta <- all_stations() %>% filter(id == id_val)
        if (nrow(s_meta) > 0) {
          lng <- s_meta$longitude[1]
          lat <- s_meta$latitude[1]
          res <- if (is.null(input$data_resolution)) NULL else tolower(input$data_resolution)
          overall_start <- if ("station_overall_start" %in% names(s_meta)) s_meta$station_overall_start[1] else s_meta$start_date[1]
          overall_end <- if ("station_overall_end" %in% names(s_meta)) s_meta$station_overall_end[1] else s_meta$end_date[1]
          lbl <- generate_station_label(
            s_meta$name[1], s_meta$id[1], s_meta$state[1],
            s_meta$start_date[1], s_meta$end_date[1], s_meta$detailed_summary[1],
            overall_start, overall_end, res
          )
          highlight_selected_station(maplibre_proxy("map"), lng, lat, lbl)
        }
        # Sync dropdown
        updateSelectizeInput(session, "station_selector", selected = id_val)
      }
    }
  })
  # --- Date Synchronization (Sliding Window & Bi-Directional) ---
  # Enforce 28 <= window <= max days (366 for hourly, 3652 for daily/monthly)
  # by sliding the date the user did NOT touch, detected by comparing
  # against the previously observed range.
  observeEvent(input$date_range, {
    req(input$date_range)
    d_start <- input$date_range[1]
    d_end <- input$date_range[2]
    # Validate inputs are not NA
    if (any(is.na(c(d_start, d_end)))) {
      return()
    }
    req(input$data_resolution)
    # Get previous values for change detection
    prev <- previous_date_range()
    # If this is the first observation, just store the current values and return
    if (is.null(prev)) {
      previous_date_range(c(d_start, d_end))
      return()
    }
    prev_start <- prev[1]
    prev_end <- prev[2]
    # Calculate current window length in days
    diff <- as.numeric(difftime(d_end, d_start, units = "days"))
    # Defensive check for NA diff
    if (is.na(diff)) {
      return()
    }
    # Max window depends on resolution:
    # Monthly/Daily: ~10 years (3652 days) to allow 6-year views
    # Hourly: 1 year (366 days)
    res <- tolower(input$data_resolution)
    is_extended <- res %in% c("monthly", "daily")
    max_days <- if (is_extended) 3652 else 366
    min_days <- 28
    if (diff > max_days || diff < min_days) {
      # Detect which date changed (all.equal tolerates Date arithmetic noise)
      start_changed <- !isTRUE(all.equal(d_start, prev_start))
      end_changed <- !isTRUE(all.equal(d_end, prev_end))
      if (end_changed && !start_changed) {
        # User changed end date -> adjust start date to clamp the window
        target_diff <- if (diff > max_days) max_days else min_days
        new_start <- d_end - target_diff
        # freeze prevents this observer re-firing on our own update
        freezeReactiveValue(input, "date_range")
        updateDateRangeInput(session, "date_range", start = new_start, end = d_end)
        # Update tracked state
        previous_date_range(c(new_start, d_end))
      } else {
        # User changed start date (or both) -> adjust end date
        target_diff <- if (diff > max_days) max_days else min_days
        new_end <- d_start + target_diff
        freezeReactiveValue(input, "date_range")
        updateDateRangeInput(session, "date_range", start = d_start, end = new_end)
        # Update tracked state
        previous_date_range(c(d_start, new_end))
      }
    } else {
      # Window within bounds: just record it
      previous_date_range(c(d_start, d_end))
    }
  })
  # --- Async Fetching State Machine ---
  # Downloads are driven by a small stage machine so the Shiny event loop
  # stays responsive between chunks; `later::later` schedules each step.
  fetch_stage <- reactiveVal(0) # 0=Idle, 1=Init, 2=NextFile, 3=Head, 4=Download, 5=Parse, 6=Merge (-1 = async step in flight)
  fetch_message <- reactiveVal("Idle")
  fetch_queue <- reactiveVal(list()) # List of targets (url, etc.)
  fetch_queue_idx <- reactiveVal(0) # 1-based index into fetch_queue
  parsed_data_list <- reactiveVal(list()) # Accumulate dataframes, one per parsed file
  # Progress Tracking (bytes)
  fetch_total_size <- reactiveVal(0) # 0 = total size not yet known
  fetch_current_pos <- reactiveVal(0)
  fetch_current_token <- reactiveVal(NULL) # To invalidate stale async sessions
  fetch_temp_file <- reactiveVal(NULL) # Path of the zip currently being downloaded
  # --- Immediate State Clearing to Prevent Main Thread Blocking ---
  # When inputs change, clear data IMMEDIATELY to prevent plots from
  # rendering old data with new settings (e.g. Daily data -> Hourly plots = 3000+ traces)
  # This covers the 500ms debounce gap.
  observeEvent(input$data_resolution,
    {
      station_data(NULL)
      parsed_data_list(list())
      # Only block UI if we have a station selected (fetch will occur)
      if (!is.null(current_station_id())) {
        loading_status(TRUE)
      }
    },
    priority = 1000, # run before other resolution observers
    ignoreInit = TRUE
  )
  # Same immediate clearing when the selected station changes (see note above
  # the resolution variant).
  observeEvent(current_station_id(),
    {
      station_data(NULL)
      parsed_data_list(list())
      # Only block UI if a valid station is selected
      if (!is.null(current_station_id())) {
        loading_status(TRUE)
      }
    },
    priority = 1000,
    ignoreInit = TRUE
  )
  # Initial Trigger (Debounced Window)
  # Station + date window combined; debounced 500 ms so rapid input changes
  # trigger only one fetch.
  window_reactive <- reactive({
    req(input$date_range)
    list(id = current_station_id(), start = input$date_range[1], end = input$date_range[2])
  })
  window_debounced <- window_reactive %>% debounce(500)
  # Abort the fetch state machine: invalidate in-flight async callbacks,
  # clean up the temp file, clear partial results, and unfreeze the UI.
  # `msg` (optional) is surfaced via loading_diagnostics.
  reset_fetch <- function(msg = NULL) {
    fetch_stage(0)
    # Invalidate token to kill pending async tasks
    fetch_current_token(as.numeric(Sys.time()))
    loading_status(FALSE)
    if (!is.null(msg)) loading_diagnostics(msg)
    tmp <- fetch_temp_file()
    if (!is.null(tmp) && file.exists(tmp)) unlink(tmp)
    fetch_temp_file(NULL)
    parsed_data_list(list())
    # Unfreeze UI when fetch is reset/cancelled
    session$sendCustomMessage("unfreezeUI", list())
  }
  # Handle Cancel from Freeze Window (cancel button sent by the freeze overlay)
  observeEvent(input$cancel_loading, {
    reset_fetch("Cancelled by user")
    showNotification("Loading cancelled by user.", type = "warning")
  })
  # Stage 1: Initialization — build the download queue of files overlapping
  # the requested window, then advance to Stage 2.
  observe({
    req(window_debounced()$id, window_debounced()$start, window_debounced()$end)
    # Prevent re-triggering if already running for same params (optional logic could go here)
    # For now, any change restarts fetch.
    station_id <- window_debounced()$id
    loading_status(TRUE)
    # Get station name for display
    station_meta <- all_stations() %>% filter(id == station_id)
    station_name <- if (nrow(station_meta) > 0) station_meta$name[1] else station_id
    msg <- "Initializing data fetch..."
    loading_diagnostics(msg)
    fetch_message(msg)
    # Freeze UI during download and parsing - include station name
    session$sendCustomMessage("freezeUI", list(text = msg, station = station_name))
    # Generate Filtered Index - Optimize based on Date Range
    req_start_date <- as.Date(window_debounced()$start)
    req_end_date <- as.Date(window_debounced()$end)
    # Helper to convert YYYYMMDD string to Date
    parse_dwd_date <- function(x) {
      as.Date(x, format = "%Y%m%d")
    }
    targets <- current_index() %>%
      filter(id == station_id) %>%
      filter(
        # Logic: Keep file if (FileStart <= ReqEnd) AND (FileEnd >= ReqStart)
        # Handle cases where start/end might be NA or "recent" type implies checking
        type == "recent" | # Always check recent files as they might have fresh data
          type == "solar" | # Solar usually has one file or simple structure, keep safe
          (
            !is.na(start_date) & !is.na(end_date) &
              parse_dwd_date(start_date) <= req_end_date &
              parse_dwd_date(end_date) >= req_start_date
          )
      )
    if (nrow(targets) == 0) {
      # Fallback/Debug: check if we filtered out everything
      raw_targets <- current_index() %>% filter(id == station_id)
      msg <- "No data found in index for this station."
      if (nrow(raw_targets) > 0) {
        # Data exists but was filtered out by date range
        msg <- "No data found for the selected date range."
      }
      reset_fetch(msg)
      return()
    }
    # Setup Queue: one list element per index row for easy iteration
    q <- split(targets, seq(nrow(targets)))
    fetch_queue(q)
    fetch_queue_idx(1)
    parsed_data_list(list()) # Clear previous
    # New Token marks this fetch session; stale async callbacks compare
    # against it and abort
    token <- as.numeric(Sys.time())
    fetch_current_token(token)
    fetch_stage(2) # Go to Next File
  })
# Stage 2: Next File
observe({
req(fetch_stage() == 2)
idx <- fetch_queue_idx()
q <- fetch_queue()
if (idx > length(q)) {
# All files done -> Merge
fetch_stage(6)
} else {
# Setup next file
target <- q[[idx]]
msg <- paste0("Downloading file ", idx, "/", length(q), "...")
loading_diagnostics(msg)
session$sendCustomMessage("freezeUI", list(text = msg))
fetch_message(msg)
# Initialize tracking for new file
fetch_total_size(0) # Unknown initially
fetch_current_pos(0)
# Prep Temp File
tmp <- tempfile(fileext = ".zip")
fetch_temp_file(tmp)
file.create(tmp)
fetch_stage(4) # Go directly to Download Loop
}
})
  # Stage 3: REMOVED (Head Check caused blocking)
  # Stage 4: Download Chunk — fetch one HTTP Range chunk per pass, then
  # re-enter stage 4 (next chunk) or stage 5 (parse). The actual transfer
  # runs in a later::later callback so the reactive flush returns quickly;
  # fetch_stage(-1) parks the machine while the async step is in flight.
  observe({
    req(fetch_stage() == 4)
    # Settings - Increased to 50MB to reduce loop overhead
    chunk_size <- 50 * 1024 * 1024
    total <- fetch_total_size()
    current <- fetch_current_pos()
    tmp <- fetch_temp_file()
    idx <- fetch_queue_idx()
    q <- fetch_queue()
    target <- q[[idx]]
    # Byte range for this chunk; if total unknown, just request the next span
    if (total > 0) {
      end <- min(current + chunk_size - 1, total - 1)
    } else {
      end <- current + chunk_size - 1
    }
    token <- fetch_current_token()
    message(paste0("[Debug] Stage 4: Starting chunk download. Range: ", current, "-", end, " Total: ", total))
    later::later(function() {
      # Stale-session guard: a newer fetch invalidates this callback
      if (!identical(isolate(fetch_current_token()), token)) {
        message("[Debug] Stage 4: Token mismatch in later callback. Aborting.")
        return()
      }
      tryCatch(
        {
          isolate({
            h <- curl::new_handle()
            curl::handle_setopt(
              h,
              range = paste0(current, "-", end),
              # Robustness: Timeout if connection hangs or speed is too low
              connecttimeout = 10,
              low_speed_time = 30, # Abort if speed < 100 B/s for 30s
              low_speed_limit = 100
            )
            message(paste0("[Debug] Stage 4: Executing curl for ", target$url))
            tryCatch(
              {
                resp <- curl::curl_fetch_memory(target$url, handle = h)
                message(paste0("[Debug] Stage 4: Curl finished. Status: ", resp$status_code))
                if (resp$status_code == 200) {
                  # 200 OK means server sent the whole file (or ignored range)
                  con <- file(tmp, open = "wb")
                  writeBin(resp$content, con)
                  close(con)
                  new_pos <- length(resp$content)
                  fetch_current_pos(new_pos)
                  fetch_total_size(new_pos)
                  msg <- paste0("Downloaded file ", idx, "/", length(q), ": ", format_bytes(new_pos), " (Finished)")
                  loading_diagnostics(msg)
                  session$sendCustomMessage("freezeUI", list(text = msg))
                  fetch_message(msg)
                  fetch_stage(5) # Parse
                } else if (resp$status_code == 206) {
                  # 206 Partial Content: append chunk to the temp file
                  con <- file(tmp, open = "ab")
                  writeBin(resp$content, con)
                  close(con)
                  bytes_received <- length(resp$content)
                  new_pos <- current + bytes_received
                  fetch_current_pos(new_pos)
                  # Try to learn total size from the Content-Range header
                  # ("bytes a-b/total") if still unknown
                  if (total == 0) {
                    headers <- curl::parse_headers_list(resp$headers)
                    if (!is.null(headers[["content-range"]])) {
                      cr <- headers[["content-range"]]
                      parts <- strsplit(cr, "/")[[1]]
                      if (length(parts) == 2 && parts[2] != "*") {
                        t_val <- as.numeric(parts[2])
                        if (!is.na(t_val)) fetch_total_size(t_val)
                      }
                    }
                  }
                  total <- fetch_total_size()
                  # Decide whether this was the final chunk and build progress text
                  done <- FALSE
                  percent_str <- ""
                  if (total > 0) {
                    if (new_pos >= total) done <- TRUE
                    pct <- round(new_pos / total * 100)
                    percent_str <- paste0(" / ", format_bytes(total), " (", pct, "%)")
                  } else {
                    # Short chunk implies end of file when total is unknown
                    if (bytes_received < chunk_size) done <- TRUE
                    percent_str <- " (Unknown total)"
                  }
                  msg <- paste0(
                    "Downloading file ", idx, "/", length(q), ": ",
                    format_bytes(new_pos), percent_str
                  )
                  loading_diagnostics(msg)
                  session$sendCustomMessage("freezeUI", list(text = msg))
                  fetch_message(msg)
                  if (done) {
                    if (total == 0) fetch_total_size(new_pos)
                    fetch_stage(5) # Parse
                  } else {
                    fetch_stage(4) # Next Chunk
                  }
                } else if (resp$status_code == 416) {
                  # Range Not Satisfiable: we already have the whole file
                  # (only valid if some bytes were received earlier)
                  if (total == 0 && current > 0) {
                    fetch_stage(5)
                  } else {
                    loading_diagnostics(paste("Download Error HTTP", resp$status_code))
                    fetch_stage(0)
                  }
                } else {
                  loading_diagnostics(paste("Download Error HTTP", resp$status_code))
                  fetch_stage(0)
                }
              },
              error = function(e) {
                # Handle Timeout/Connection Errors
                message(paste0("[Debug] Download Failed: ", e$message))
                loading_diagnostics(paste("Download Failed:", e$message))
                session$sendCustomMessage("unfreezeUI", list())
                showNotification(
                  paste("Download stopped: Connection timed out or stuck. Please try again later. (File ", idx, ")"),
                  type = "error",
                  duration = NULL # Persistent until clicked
                )
                fetch_stage(0)
              }
            )
          })
        },
        error = function(e) {
          message(paste0("[Debug] Critical Async Error: ", e$message))
          msg <- paste("Critical Async Error:", e$message)
          loading_diagnostics(msg)
          loading_status(FALSE)
          session$sendCustomMessage("unfreezeUI", list())
          fetch_stage(0)
        }
      )
    }, 0.05)
    # Park the machine until the async callback sets the next stage
    fetch_stage(-1)
  })
# Stage 5: Parse
# Parses the downloaded temp file off the reactive flush via later::later(),
# appends the result to parsed_data_list(), and hands control back to the
# queue dispatcher (stage 2) for the next file.
observe({
  req(fetch_stage() == 5)
  run_token <- fetch_current_token()
  file_idx <- fetch_queue_idx()
  queue_len <- length(fetch_queue())
  status_msg <- paste0("Parsing file ", file_idx, "/", queue_len, "...")
  loading_diagnostics(status_msg)
  session$sendCustomMessage("freezeUI", list(text = status_msg))
  fetch_message(status_msg)
  later::later(function() {
    # Abort if a newer fetch run superseded this one while we were queued.
    if (!identical(isolate(fetch_current_token()), run_token)) {
      return()
    }
    isolate({
      tmp_path <- fetch_temp_file()
      # Forward the requested window so the parser can skip irrelevant rows.
      win <- window_debounced()
      # Parser comes from the global environment; a parse failure simply
      # contributes no data for this file.
      parsed <- tryCatch(
        read_dwd_data(tmp_path, win$start, win$end),
        error = function(e) NULL
      )
      if (!is.null(parsed)) {
        accumulated <- parsed_data_list()
        accumulated[[length(accumulated) + 1]] <- parsed
        parsed_data_list(accumulated)
      }
      unlink(tmp_path)
      fetch_temp_file(NULL)
      # Advance the queue and return to the dispatcher stage.
      fetch_queue_idx(file_idx + 1)
      fetch_stage(2)
    })
  }, 0.1)
  # Park the machine until the deferred callback sets the next stage.
  fetch_stage(-1)
})
# Stage 6: Merge
# Final stage of the fetch state machine: joins all parsed files on datetime,
# coalesces duplicated variables, applies the requested date window, publishes
# the result via station_data(), and releases the UI.
observe({
  req(fetch_stage() == 6)
  # Token identifies this fetch run; the deferred callback below aborts if a
  # newer run has started in the meantime.
  token <- fetch_current_token()
  msg <- "Merging and finalizing data..."
  loading_diagnostics(msg)
  session$sendCustomMessage("freezeUI", list(text = msg))
  fetch_message(msg)
  later::later(function() {
    if (!identical(isolate(fetch_current_token()), token)) {
      return()
    }
    isolate({
      plist <- parsed_data_list()
      if (length(plist) == 0) {
        # Nothing parsed at all: clear state and stop the machine.
        loading_diagnostics("No valid data found.")
        loading_status(FALSE)
        station_data(NULL)
        fetch_stage(0)
        return()
      }
      # Merge Logic (simplified from parse_dwd.R): successive full joins on
      # datetime; identically named columns from different files come back
      # with join suffixes and are coalesced below.
      final_df <- purrr::reduce(plist, full_join, by = "datetime")
      # Coalesce
      # Canonical variable names expected from the DWD parsers.
      weather_vars <- c(
        "temp", "temp_min", "temp_max", "temp_min_avg", "temp_max_avg", "rh", "dew_point",
        "abs_humidity", "vapor_pressure", "wet_bulb_temp",
        "precip", "wind_speed", "wind_dir", "pressure", "station_pressure", "cloud_cover", "cloud_cover_indicator",
        "wind_gust_max", "solar_global", "sunshine_duration",
        "soil_temp_2cm", "soil_temp_5cm", "soil_temp_10cm", "soil_temp_20cm", "soil_temp_50cm", "soil_temp_100cm",
        "soil_temp_min_5cm",
        "snow_depth", "snow_water_equiv", "snow_fresh_sum", "snow_depth_sum",
        "thunderstorm", "glaze", "graupel", "hail", "fog", "frost", "storm_6", "storm_8", "dew",
        "precip_net_thunderstorm", "precip_net_graupel", "precip_net_hail", "precip_net_fog",
        "visibility", "visibility_indicator", "weather_code", "weather_text",
        "cloud_layer1_code", "cloud_layer1_abbrev", "cloud_layer1_height", "cloud_layer1_amount",
        "cloud_layer2_code", "cloud_layer2_abbrev", "cloud_layer2_height", "cloud_layer2_amount",
        "cloud_layer3_code", "cloud_layer3_abbrev", "cloud_layer3_height", "cloud_layer3_amount",
        "cloud_layer4_code", "cloud_layer4_abbrev", "cloud_layer4_height", "cloud_layer4_amount"
      )
      available_cols <- names(final_df)
      clean_df <- final_df %>% select(datetime)
      for (v in weather_vars) {
        # Match the bare name or any suffixed variant ("temp", "temp.x", ...);
        # "(\\.|$)" prevents "temp" from also matching "temp_min".
        v_cols <- available_cols[grepl(paste0("^", v, "(\\.|$)"), available_cols)]
        if (length(v_cols) > 0) {
          # coalesce keeps the first non-NA value, so earlier files in plist win.
          clean_df[[v]] <- do.call(coalesce, final_df[v_cols])
        }
      }
      # Cleanup
      clean_df <- clean_df %>%
        distinct(datetime, .keep_all = TRUE) %>%
        arrange(datetime)
      # Final Window Filter (safety) with Auto-Correction
      s_date <- window_debounced()$start
      e_date <- window_debounced()$end
      if (!is.null(s_date) && !is.null(e_date)) {
        req_start <- as.POSIXct(s_date)
        # +1 day so the end date itself is fully covered for sub-daily data.
        req_end <- as.POSIXct(e_date) + days(1)
        filtered_df <- clean_df %>%
          filter(datetime >= req_start, datetime <= req_end)
        # Check for mismatch: Data exists but not in window
        if (nrow(filtered_df) == 0 && nrow(clean_df) > 0) {
          data_max <- max(clean_df$datetime, na.rm = TRUE)
          data_min <- min(clean_df$datetime, na.rm = TRUE)
          # If the requested window is completely outside the data range
          if (req_start > data_max || req_end < data_min) {
            msg <- paste0("Warning: No data in selected range. Available data ends: ", format(data_max, "%Y-%m-%d"), ". Displaying available data.")
            loading_diagnostics(msg)
            # Do NOT auto-adjust dates (user preference)
            # We just let it filter (which yields 0 rows) or show what we have
            # NOTE(review): this warning is overwritten by the "Success" line
            # below in the same pass — confirm whether it should persist.
          }
        }
        clean_df <- filtered_df
      }
      # If after filtering we have 0 rows, but we had data before, it means window is empty
      if (nrow(clean_df) == 0 && nrow(final_df) > 0) {
        msg <- "No data found for the selected date range."
        loading_diagnostics(msg)
      }
      station_data(clean_df)
      loading_diagnostics(paste0("Success: ", nrow(clean_df), " rows loaded."))
      loading_status(FALSE)
      fetch_stage(0)
      # Navigate to Dashboard tab
      if (input$main_nav != "Dashboard") {
        updateNavbarPage(session, "main_nav", selected = "Dashboard")
      }
      # Show rendering message and then unfreeze after delay
      session$sendCustomMessage("freezeUI", list(text = "Rendering plots..."))
      later::later(function() {
        session$sendCustomMessage("unfreezeUI", list())
      }, 2.0) # Give 2 seconds for plots to render
    })
  }, 0.1)
  fetch_stage(-1)
})
# Output logic for Panel
# These flags back conditionalPanel() expressions in the UI, so each one is
# marked suspendWhenHidden = FALSE to keep updating while its panel is hidden.
output$is_loading <- reactive(loading_status())
outputOptions(output, "is_loading", suspendWhenHidden = FALSE)

output$station_ready <- reactive(!is.null(station_data()))
outputOptions(output, "station_ready", suspendWhenHidden = FALSE)

# Diagnostics text is trusted HTML produced by the fetch state machine.
output$data_diagnostics <- renderUI(HTML(loading_diagnostics()))

output$has_diag <- reactive(nzchar(loading_diagnostics()))
outputOptions(output, "has_diag", suspendWhenHidden = FALSE)
# --- Plot Outputs (Top-Level Reactivity) ---
# These are defined outside of observers to ensure they invalidate immediately
# when station_data() changes. All 13 outputs followed an identical pattern,
# so registration is factored into a single helper.
#
# register_plot_output: wires output[[output_id]] to a renderPlotly that
# re-renders whenever station_data() changes. When forward_resolution is TRUE
# the current resolution is passed to the plot function, read with isolate()
# so a dropdown change alone does not re-render plots against stale data.
register_plot_output <- function(output_id, plot_fun, forward_resolution = TRUE) {
  # Force promises so each registration captures its own arguments.
  force(output_id)
  force(plot_fun)
  force(forward_resolution)
  output[[output_id]] <- renderPlotly({
    df <- station_data()
    req(df)
    if (forward_resolution) {
      plot_fun(df, resolution = isolate(tolower(input$data_resolution)))
    } else {
      plot_fun(df)
    }
  })
}
register_plot_output("temp_plot", create_temperature_plot)
register_plot_output("humidity_plot", create_humidity_plot)
register_plot_output("wind_overview_plot", create_wind_overview_plot)
register_plot_output("pressure_plot", create_pressure_plot)
register_plot_output("cloud_plot", create_cloud_cover_plot)
register_plot_output("precip_plot", create_precipitation_plot)
# Wind rose, diurnal and weathergami plots take no resolution argument.
register_plot_output("wind_rose", create_wind_rose_plot, forward_resolution = FALSE)
register_plot_output("solar_radiation_plot", create_solar_radiation_plot)
register_plot_output("sunshine_plot", create_sunshine_duration_plot)
register_plot_output("snow_plot", create_snow_plot)
register_plot_output("soil_temp_plot", create_soil_temp_plot)
register_plot_output("diurnal_plot", create_diurnal_plot, forward_resolution = FALSE)
register_plot_output("weathergami_plot", create_weathergami_plot, forward_resolution = FALSE)
# Tables
# Raw data table for the loaded station. Internal column codes are replaced
# with display labels via the global dwd_column_labels mapping, whose names
# are internal codes and whose values are labels (e.g. c(temp = "Air Temp")).
# (Cleaned up: the previous version carried a long block of exploratory
# comments and three computed-but-unused rename vectors.)
output$hourly_data_table <- DT::renderDataTable({
  req(!loading_status())
  df <- station_data()
  req(df)
  # Render datetimes as text so DT shows a stable, sortable format.
  df <- df %>% mutate(datetime = format(datetime, "%Y-%m-%d %H:%M"))
  # Relabel only the mapped columns that are actually present.
  df_display <- df
  for (col in intersect(names(dwd_column_labels), names(df_display))) {
    names(df_display)[names(df_display) == col] <- dwd_column_labels[[col]]
  }
  datatable(df_display,
    options = list(pageLength = 15, scrollX = TRUE)
  )
})
# Download Handler
# Excel export of the currently loaded station data; the filename embeds the
# station ID (when known) and the active resolution.
output$download_hourly <- downloadHandler(
  filename = function() {
    station <- current_station_id()
    res_part <- if (!is.null(input$data_resolution)) tolower(input$data_resolution) else "data"
    if (is.null(station)) {
      paste0("dwd_data_", res_part, ".xlsx")
    } else {
      paste0("dwd_station_", station, "_", res_part, ".xlsx")
    }
  },
  content = function(file) {
    req(station_data())
    # writexl serialises POSIXct datetimes natively, so no pre-formatting.
    write_xlsx(station_data(), path = file)
  }
)
# Data Tab Header - Info Callout Card
# Summary card shown above the data table: station identity, location,
# elevation, resolution badge, the loaded date range, and the export button.
output$station_info_header <- renderUI({
  id <- current_station_id()
  if (is.null(id)) {
    return(NULL)
  }
  # Metadata lookup; !!id unquotes the local value so it is not confused
  # with the `id` column of the stations table.
  meta <- all_stations() %>% dplyr::filter(id == !!id)
  if (nrow(meta) == 0) {
    return(NULL)
  }
  s_name <- meta$name[1]
  s_state <- meta$state[1]
  s_elev <- meta$elevation[1]
  # Dynamic Badge Logic: blue for hourly, green otherwise.
  # Fix: guard against input$data_resolution being NULL/empty early in the
  # session — tolower(NULL) yields character(0) and the if() below would
  # error with a length-zero condition.
  res_label <- input$data_resolution
  if (!isTRUE(nzchar(res_label[1]))) {
    res_label <- "Data"
  }
  res_class <- if (tolower(res_label) == "hourly") "bg-primary" else "bg-success"
  # Data Range actually loaded (not the requested window).
  df <- station_data()
  if (is.null(df) || nrow(df) == 0) {
    dates_text <- "No data loaded"
  } else {
    date_range <- range(as.Date(df$datetime), na.rm = TRUE)
    dates_text <- paste(date_range[1], "to", date_range[2])
  }
  # Unified Info Card
  card(
    style = "margin-bottom: 20px; border-left: 5px solid #007bff;",
    card_body(
      padding = 15,
      layout_columns(
        fill = FALSE,
        # Col 1: Station
        div(
          strong("Station"), br(),
          span(s_name, style = "font-size: 1.1rem;"), br(),
          tags$small(class = "text-muted", paste("ID:", id))
        ),
        # Col 2: Location
        div(
          strong("Location"), br(),
          span(s_state), br(),
          tags$small(class = "text-muted", paste0(meta$latitude[1], "°N, ", meta$longitude[1], "°E"))
        ),
        # Col 3: Elevation & Resolution
        div(
          strong("Technical"), br(),
          span(paste0(s_elev, " m")), br(),
          span(class = paste("badge", res_class), res_label)
        ),
        # Col 4: Period
        div(
          strong("Data Selection"), br(),
          span(dates_text)
        ),
        # Col 5: Actions
        div(
          class = "d-flex align-items-center justify-content-end",
          downloadButton(
            "download_hourly",
            label = "Export Excel",
            class = "btn-sm btn-primary",
            icon = icon("file-excel")
          )
        )
      )
    )
  )
})
# Station list table.
# Column order matters: the JS dblclick handler below reads rowData[0] as the
# station ID, so "ID" must stay first and row names must stay disabled.
output$table <- DT::renderDataTable({
  stations <- filtered_stations()
  display <- stations %>%
    select(
      "ID" = id,
      "Name" = name,
      "State" = state,
      "Elev." = elevation,
      "Status" = detailed_summary
    )
  datatable(
    display,
    selection = "none",
    rownames = FALSE,
    callback = JS("
      table.on('dblclick', 'tr', function() {
        var rowData = table.row(this).data();
        if (rowData !== undefined && rowData !== null) {
          var stationId = rowData[0];
          Shiny.setInputValue('table_station_dblclick', stationId, {priority: 'event'});
        }
      });
    ")
  )
})
# Selection from Table - Double Click
# Double-clicking a table row selects that station: syncs the sidebar
# dropdown, highlights the station on the map, and (indirectly, via the
# current_station_id() change) kicks off the fetch state machine.
observeEvent(input$table_station_dblclick, {
  station_id <- input$table_station_dblclick
  req(station_id)
  # Prefer the currently filtered view; fall back to the full station list
  # in case the filter changed between render and click (unlikely but safe).
  meta <- filtered_stations() %>% filter(id == station_id)
  if (nrow(meta) == 0) {
    meta <- all_stations() %>% filter(id == station_id)
  }
  req(nrow(meta) > 0)
  current_station_id(station_id)
  # Sync dropdown (sidebar)
  updateSelectizeInput(session, "station_selector", selected = station_id)
  # Coordinates for the map highlight & zoom.
  lat_val <- meta$latitude[1]
  lng_val <- meta$longitude[1]
  # Build the popup label; overall_* columns may be absent for some sources.
  res <- if (is.null(input$data_resolution)) NULL else tolower(input$data_resolution)
  overall_start <- if ("station_overall_start" %in% names(meta)) meta$station_overall_start[1] else meta$start_date[1]
  overall_end <- if ("station_overall_end" %in% names(meta)) meta$station_overall_end[1] else meta$end_date[1]
  lbl <- generate_station_label(
    meta$name[1], meta$id[1], meta$state[1],
    meta$start_date[1], meta$end_date[1], meta$detailed_summary[1],
    overall_start, overall_end, res
  )
  highlight_selected_station(maplibre_proxy("map"), lng_val, lat_val, lbl)
  # NOTE: Data parsing and Dashboard navigation are automatically triggered
  # by the change in current_station_id() through window_debounced -> fetch state machine
})
# Dynamic Plots Panel - All plots in grid format (no tabs)
# Only displays plots whose underlying columns actually contain data. The
# previous version repeated the availability check ~25 times and grew a
# tagList in a dozen if-blocks; this folds both patterns into helpers while
# preserving the exact plot order and availability semantics.
output$details_tabs <- renderUI({
  req(input$data_resolution)
  # Don't render while a fetch is in progress.
  if (isTRUE(loading_status())) {
    return(NULL)
  }
  res <- isolate(tolower(input$data_resolution))
  df <- station_data()
  req(df)
  # Clean data once for availability checks.
  df_clean <- clean_dwd_data(df)
  # TRUE when the column exists and has at least one non-NA value.
  col_has_data <- function(col) col %in% names(df_clean) && any(!is.na(df_clean[[col]]))
  has_any <- function(cols) any(vapply(cols, col_has_data, logical(1)))
  has_all <- function(cols) all(vapply(cols, col_has_data, logical(1)))
  has_wind <- has_any(c("wind_speed", "wind_gust_max"))
  # Ordered spec: output id -> availability flag. Order defines grid layout.
  # Wind rose only for hourly data (needs direction AND speed/gust);
  # weathergami only for daily data (needs BOTH temp_min and temp_max).
  specs <- c(
    temp_plot = has_any(c("temp", "temp_min", "temp_max")),
    humidity_plot = has_any(c("rh", "dew_point")),
    wind_overview_plot = has_wind,
    pressure_plot = has_any(c("pressure", "station_pressure")),
    cloud_plot = col_has_data("cloud_cover"),
    precip_plot = col_has_data("precip"),
    solar_radiation_plot = col_has_data("solar_global"),
    sunshine_plot = col_has_data("sunshine_duration"),
    wind_rose = res == "hourly" && col_has_data("wind_dir") && has_wind,
    weathergami_plot = res == "daily" && has_all(c("temp_min", "temp_max")),
    snow_plot = has_any(c("snow_depth", "snow_water_equiv", "snow_fresh_sum", "snow_depth_sum")),
    soil_temp_plot = has_any(paste0("soil_temp_", c(2L, 5L, 10L, 20L, 50L, 100L), "cm"))
  )
  cells <- lapply(names(specs)[specs], function(plot_id) {
    div(class = "col-12 col-lg-6", plotlyOutput(plot_id, height = "320px"))
  })
  # Wrap in grid container
  div(
    class = "row g-3",
    style = "padding: 10px;",
    cells
  )
})
# --- URL Synchronization Observers ---
# Each navigational state change pushes the current app state into the URL.
# ignoreInit suppresses a spurious broadcast during session startup.
observeEvent(input$main_nav, broadcast_state(), ignoreInit = TRUE) # 1. Tab
observeEvent(current_station_id(), broadcast_state(), ignoreInit = TRUE) # 2. Station
observeEvent(input$dashboard_subtabs, broadcast_state(), ignoreInit = TRUE) # 3. Subtab
observeEvent(input$data_resolution, broadcast_state(), ignoreInit = TRUE) # 4. Resolution
observeEvent(input$date_range, broadcast_state(), ignoreInit = TRUE) # 5. Dates
}