OwenStOnge committed on
Commit
da6ef78
·
verified ·
1 Parent(s): 6f9f7ce

Update app.R

Browse files
Files changed (1) hide show
  1. app.R +59 -24
app.R CHANGED
@@ -656,7 +656,6 @@ ui <- fluidPage(
656
 
657
  br(),
658
  actionButton("scrape_btn", "Scrape Data", class = "btn-primary"),
659
- actionButton("fetch_btn", "Fetch Results", class = "btn-primary"),
660
  br(), br(),
661
  downloadButton("download_scrape", "Download CSV")
662
  ),
@@ -703,6 +702,7 @@ server <- function(input, output, session) {
703
  bat_tracking_parsed <- reactiveVal(NULL)
704
  merge_result <- reactiveVal(NULL)
705
  scraped_data <- reactiveVal(NULL)
 
706
 
707
  scrape_status_msg <- reactiveVal("Ready.")
708
 
@@ -1591,46 +1591,81 @@ observeEvent(input$scrape_btn, {
1591
  if (is.null(result)) return()
1592
 
1593
  if (httr::status_code(result) == 204) {
1594
- scrape_status_msg("Scrape triggered! GitHub is running it now. Wait a few minutes then click 'Fetch Results'.")
 
1595
  } else {
1596
  scrape_status_msg(paste("GitHub API error:", httr::status_code(result)))
1597
  }
1598
  })
1599
 
1600
- # Fetch results from GitHub
1601
- observeEvent(input$fetch_btn, {
1602
- scrape_status_msg("Fetching results...")
 
 
1603
 
1604
  gh_token <- Sys.getenv("GITHUB_TOKEN")
1605
  gh_repo <- Sys.getenv("GITHUB_REPO")
1606
- filename <- paste0(input$scrape_source, "_", input$start_date, "_to_", input$end_date, ".csv")
1607
-
1608
- url <- paste0("https://api.github.com/repos/", gh_repo, "/contents/data/", filename)
1609
 
1610
- result <- tryCatch({
1611
- resp <- httr::GET(
1612
- url,
1613
  httr::add_headers(
1614
  Authorization = paste("Bearer", gh_token),
1615
- Accept = "application/vnd.github.v3.raw"
1616
  )
1617
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1618
 
1619
- if (httr::status_code(resp) == 200) {
1620
- read.csv(text = httr::content(resp, as = "text", encoding = "UTF-8"))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1621
  } else {
1622
- NULL
1623
  }
1624
- }, error = function(e) {
1625
- scrape_status_msg(paste("Error:", e$message))
1626
- NULL
1627
- })
1628
-
1629
- if (!is.null(result) && nrow(result) > 0) {
1630
- scraped_data(result)
1631
- scrape_status_msg(paste0("Done! ", nrow(result), " rows × ", ncol(result), " columns."))
1632
  } else {
1633
- scrape_status_msg("Not ready yet. Wait a minute and try again.")
1634
  }
1635
  })
1636
 
 
656
 
657
  br(),
658
  actionButton("scrape_btn", "Scrape Data", class = "btn-primary"),
 
659
  br(), br(),
660
  downloadButton("download_scrape", "Download CSV")
661
  ),
 
702
  bat_tracking_parsed <- reactiveVal(NULL)
703
  merge_result <- reactiveVal(NULL)
704
  scraped_data <- reactiveVal(NULL)
705
+ scrape_polling <- reactiveVal(FALSE)
706
 
707
  scrape_status_msg <- reactiveVal("Ready.")
708
 
 
1591
  if (is.null(result)) return()
1592
 
1593
  if (httr::status_code(result) == 204) {
1594
+ scrape_status_msg("Scrape triggered! Waiting for GitHub to finish...")
1595
+ scrape_polling(TRUE)
1596
  } else {
1597
  scrape_status_msg(paste("GitHub API error:", httr::status_code(result)))
1598
  }
1599
  })
1600
 
1601
+ # Poll GitHub every 15 seconds to check if done
1602
+ observe({
1603
+ req(scrape_polling())
1604
+
1605
+ invalidateLater(15000, session)
1606
 
1607
  gh_token <- Sys.getenv("GITHUB_TOKEN")
1608
  gh_repo <- Sys.getenv("GITHUB_REPO")
 
 
 
1609
 
1610
+ resp <- tryCatch({
1611
+ httr::GET(
1612
+ paste0("https://api.github.com/repos/", gh_repo, "/actions/runs?per_page=1"),
1613
  httr::add_headers(
1614
  Authorization = paste("Bearer", gh_token),
1615
+ Accept = "application/vnd.github.v3+json"
1616
  )
1617
  )
1618
+ }, error = function(e) { NULL })
1619
+
1620
+ if (is.null(resp)) return()
1621
+
1622
+ runs <- jsonlite::fromJSON(httr::content(resp, as = "text", encoding = "UTF-8"))
1623
+
1624
+ if (length(runs$workflow_runs) == 0) return()
1625
+
1626
+ latest <- runs$workflow_runs[1, ]
1627
+ status <- latest$status
1628
+ conclusion <- latest$conclusion
1629
+
1630
+ if (status == "completed") {
1631
+ scrape_polling(FALSE)
1632
 
1633
+ if (conclusion == "success") {
1634
+ scrape_status_msg("GitHub finished! Fetching data...")
1635
+
1636
+ # Auto-fetch the CSV
1637
+ filename <- paste0(input$scrape_source, "_", input$start_date, "_to_", input$end_date, ".csv")
1638
+ url <- paste0("https://api.github.com/repos/", gh_repo, "/contents/data/", filename)
1639
+
1640
+ data <- tryCatch({
1641
+ file_resp <- httr::GET(
1642
+ url,
1643
+ httr::add_headers(
1644
+ Authorization = paste("Bearer", gh_token),
1645
+ Accept = "application/vnd.github.v3.raw"
1646
+ )
1647
+ )
1648
+
1649
+ if (httr::status_code(file_resp) == 200) {
1650
+ read.csv(text = httr::content(file_resp, as = "text", encoding = "UTF-8"))
1651
+ } else {
1652
+ NULL
1653
+ }
1654
+ }, error = function(e) { NULL })
1655
+
1656
+ if (!is.null(data) && nrow(data) > 0) {
1657
+ scraped_data(data)
1658
+ scrape_status_msg(paste0("Done! ", nrow(data), " rows × ", ncol(data), " columns."))
1659
+ } else {
1660
+ scrape_status_msg("Scrape finished but couldn't fetch the file. Try 'Fetch Results' manually.")
1661
+ }
1662
+
1663
  } else {
1664
+ scrape_status_msg(paste("GitHub Action failed:", conclusion))
1665
  }
1666
+
 
 
 
 
 
 
 
1667
  } else {
1668
+ scrape_status_msg(paste0("GitHub is running... (status: ", status, ")"))
1669
  }
1670
  })
1671