3v324v23 committed
Commit efb2094 · 1 Parent(s): 18205f8

Update Shiny app with latest app.R and config

app.R CHANGED
@@ -1,36 +1,18 @@
1
  ##################################################################
2
- # Single R Script: Mortality Analysis + Shiny - Two-Click BBox
3
- # Using Weekly Queries (No Hard-Coded Years)
4
  ##################################################################
5
 
6
- # See https://www.inaturalist.org/pages/api+recommended+practices
7
- # Query Rate
8
- # Please keep requests to about 1 per second, and around 10k API requests a day
9
- # The API is meant to be used for building applications and for fetching small to medium batches of data. It is not meant to be a way to download data in bulk
10
- # Requests exceeding this limit might be throttled, and will return an HTTP 429 exception “Too Many Requests”
11
- # Please add delays into your code to keep under these limits, and especially if you’re getting 429 errors
12
- # We may block IPs that consistently exceed these limits
13
- # Please use a single IP address for fetching data. If we think multiple IPs are being used in coordination to bypass rate limits, we may block those IPs regardless of query rate
14
- # Downloading over 5 GB of media per hour or 24 GB of media per day may result in a permanent block
15
- # If writing software to interact with the API, please consider using a custom User Agent to identify your application, or iNaturalist username, or something we might use to differentiate your requests
16
- # The User Agent can be set with an HTTP header, e.g. User-Agent: [application or user name]
17
- #
18
-
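The removed comments above restate iNaturalist's recommended API practices. A minimal sketch, assuming an illustrative application name (not part of this commit), of how a client could follow them with httr:

library(httr)

# Illustrative only: identify the application and keep to ~1 request per second
ua <- user_agent("dead-wildlife-shiny (example-inat-username)")
resp <- GET(
  "https://api.inaturalist.org/v1/observations",
  query = list(term_id = 17, term_value_id = 19, verifiable = "true", per_page = 1),
  ua
)
Sys.sleep(1)  # polite pause between successive requests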
19
- ### 1) Install/Load Required Packages ####
20
  required_packages <- c(
21
  "httr", "jsonlite", "tidyverse", "glue", "lubridate",
22
- "wesanderson", "viridis", "shinycssloaders",
23
  "DT", "maps", "mapdata", "leaflet", "leaflet.extras",
24
- "shinythemes", "shiny"
25
  )
26
-
27
  installed_packages <- rownames(installed.packages())
28
  for (pkg in required_packages) {
29
- if (!pkg %in% installed_packages) {
30
- install.packages(pkg, dependencies = TRUE)
31
- }
32
  }
33
-
34
  library(httr)
35
  library(jsonlite)
36
  library(tidyverse)
@@ -38,6 +20,7 @@ library(glue)
38
  library(lubridate)
39
  library(wesanderson)
40
  library(viridis)
 
41
  library(shinycssloaders)
42
  library(DT)
43
  library(maps)
@@ -46,284 +29,234 @@ library(leaflet)
46
  library(leaflet.extras)
47
  library(shinythemes)
48
  library(shiny)
49
 
50
- ##################################################################
51
- # 2) Mortality-Analysis Functions
52
- ##################################################################
53
 
54
- # -- Base function to fetch dead observations over a specified date range --
55
  fetch_dead_data_once <- function(
56
- place_id = NULL,
57
- swlat = NULL,
58
- swlng = NULL,
59
- nelat = NULL,
60
- nelng = NULL,
61
- start_date,
62
- end_date,
63
- iconic_taxa = NULL,
64
- taxon_name = NULL,
65
- conservation_status = NULL,
66
- per_page = 200,
67
- max_pages = 200
68
  ) {
69
  base_url <- "https://api.inaturalist.org/v1/observations"
70
-
71
  q_parts <- list(
72
- "term_id=17", # 'Dead' annotation
73
- "term_value_id=19", # 'Dead' annotation
74
- "verifiable=true", # only verifiable
75
- glue("d1={start_date}"), # start date
76
- glue("d2={end_date}"), # end date
77
- "order=desc",
78
- "order_by=created_at",
79
- glue("per_page={per_page}")
80
  )
81
-
82
- if (!is.null(iconic_taxa) && iconic_taxa != "") {
83
- q_parts <- c(q_parts, glue("iconic_taxa={iconic_taxa}"))
84
- }
85
-
86
- if (!is.null(taxon_name) && taxon_name != "") {
87
- q_parts <- c(q_parts, glue("taxon_name={URLencode(taxon_name)}"))
88
- }
89
-
90
- if (!is.null(conservation_status) && conservation_status != "") {
91
- # If you'd like to filter by iNat conservation status,
92
- # or e.g. pass additional parameters to the API
93
- if (!grepl("=", conservation_status, fixed = TRUE)) {
94
- q_parts <- c(q_parts, glue("cs={URLencode(conservation_status)}"))
95
- } else {
96
- q_parts <- c(q_parts, conservation_status)
97
- }
98
- }
99
-
100
  query_params <- paste(q_parts, collapse = "&")
101
-
102
- # Build location portion of query
103
- loc_part <- ""
104
- if (!is.null(place_id)) {
105
- loc_part <- glue("&place_id={place_id}")
106
- } else if (!is.null(swlat) && !is.null(swlng) &&
107
- !is.null(nelat) && !is.null(nelng)) {
108
- loc_part <- glue("&nelat={nelat}&nelng={nelng}&swlat={swlat}&swlng={swlng}")
109
- } else {
110
- stop("Must provide either 'place_id' OR bounding box (swlat, swlng, nelat, nelng).")
111
- }
112
-
113
  observations_list <- list()
114
  current_page <- 1
115
-
116
  while (current_page <= max_pages) {
117
- query_url <- paste0(
118
- base_url, "?", query_params, "&page=", current_page, loc_part
119
- )
120
-
121
- message("Fetching page ", current_page,
122
- " [", start_date, " to ", end_date, "]:\n", query_url)
123
-
124
  resp <- GET(query_url)
125
  if (http_error(resp)) {
126
  warning("HTTP error on page ", current_page, ": ", status_code(resp))
127
  break
128
  }
129
-
130
  parsed <- content(resp, as = "text", encoding = "UTF-8") %>%
131
  fromJSON(flatten = TRUE)
132
-
133
- if (length(parsed$results) == 0) {
134
- message("No more results at page ", current_page)
135
- break
136
- }
137
-
138
  obs_page_df <- as_tibble(parsed$results)
139
  observations_list[[current_page]] <- obs_page_df
140
-
141
- # If the returned page is smaller than per_page, we've reached the last page
142
- if (nrow(obs_page_df) < per_page) {
143
- message("Reached last page of results at page ", current_page)
144
- break
145
- }
146
-
147
  current_page <- current_page + 1
148
- Sys.sleep(1.5) # Polite pause
149
  }
150
-
151
- observations_all <- bind_rows(observations_list)
152
- return(observations_all)
153
  }
154
 
155
- # -- Function to fetch data by iterating through each WEEK of a given year --
156
- fetch_dead_data_weekly <- function(
157
- year,
158
- place_id = NULL,
159
- swlat = NULL,
160
- swlng = NULL,
161
- nelat = NULL,
162
- nelng = NULL,
163
- iconic_taxa = NULL,
164
- taxon_name = NULL,
165
- conservation_status = NULL,
166
- per_page = 200,
167
- max_pages = 200
168
  ) {
169
- start_of_year <- as.Date(glue("{year}-01-01"))
170
- end_of_year <- as.Date(glue("{year}-12-31"))
171
-
172
- # Create a sequence of "week starts" from Jan 1 to Dec 31
173
- week_starts <- seq.Date(start_of_year, end_of_year, by = "1 week")
174
-
175
- weekly_list <- list()
176
-
177
  for (i in seq_along(week_starts)) {
178
- start_date <- week_starts[i]
179
- # If not the last index, end_date = next start - 1 day, else clamp to year-end
180
- if (i < length(week_starts)) {
181
- end_date <- week_starts[i + 1] - 1
182
- } else {
183
- end_date <- end_of_year
 
 
184
  }
185
-
186
- message("\n--- Querying ", year, ", Week #", i,
187
- " [", start_date, " to ", end_date, "] ---")
188
-
189
  df_week <- fetch_dead_data_once(
190
- place_id = place_id,
191
- swlat = swlat,
192
- swlng = swlng,
193
- nelat = nelat,
194
- nelng = nelng,
195
- start_date = start_date,
196
- end_date = end_date,
197
- iconic_taxa = iconic_taxa,
198
- taxon_name = taxon_name,
199
- conservation_status = conservation_status,
200
- per_page = per_page,
201
- max_pages = max_pages
202
  )
203
- weekly_list[[i]] <- df_week
204
-
205
- Sys.sleep(1.5)
206
- }
207
-
208
- year_df <- bind_rows(weekly_list)
209
- return(year_df)
210
- }
211
-
212
- # -- Wrapper that iterates over multiple years, pulling data weekly for each year --
213
- getDeadVertebrates_weeklyLoop <- function(
214
- years, # <--- No default: pass your own vector of years
215
- place_id = NULL,
216
- swlat = NULL,
217
- swlng = NULL,
218
- nelat = NULL,
219
- nelng = NULL,
220
- iconic_taxa = NULL,
221
- taxon_name = NULL,
222
- conservation_status = NULL,
223
- per_page = 500,
224
- max_pages = 500,
225
- outdir = NULL
226
- ) {
227
- all_years_list <- list()
228
-
229
- # For each year, run weekly fetch
230
- for (yr in years) {
231
- message("\n========= YEAR: ", yr, " ==========\n")
232
- yr_df <- fetch_dead_data_weekly(
233
- year = yr,
234
- place_id = place_id,
235
- swlat = swlat,
236
- swlng = swlng,
237
- nelat = nelat,
238
- nelng = nelng,
239
- iconic_taxa= iconic_taxa,
240
- taxon_name = taxon_name,
241
- conservation_status = conservation_status,
242
- per_page = per_page,
243
- max_pages = max_pages
244
- ) %>%
245
- mutate(Window = as.character(yr))
246
-
247
- all_years_list[[as.character(yr)]] <- yr_df
248
  }
249
-
250
- merged_df_all <- bind_rows(all_years_list)
251
-
252
- # If no data found or missing crucial columns, create empty placeholders
253
- if (!"created_at_details.date" %in% names(merged_df_all) ||
254
- nrow(merged_df_all) == 0) {
255
- daily_plot <- ggplot() +
256
- labs(title = "No 'Dead' Observations Found", x = NULL, y = NULL) +
257
- theme_void()
258
-
259
- top_species_plot <- ggplot() +
260
- labs(title = "No species data", x = NULL, y = NULL) +
261
- theme_void()
262
-
263
- map_hotspots_gg <- ggplot() +
264
- labs(title = "No data for hotspots map") +
265
- theme_void()
266
-
267
  return(list(
268
  merged_df_all = merged_df_all,
269
  merged_df = merged_df_all,
270
- daily_plot = daily_plot,
271
- top_species_plot = top_species_plot,
272
- map_hotspots_gg = map_hotspots_gg,
273
  daily_90th_quant = NA
274
  ))
275
  }
276
-
277
- # Optionally write out to disk
278
- if (!is.null(outdir)) {
279
- if (!dir.exists(outdir)) {
280
- dir.create(outdir, recursive = TRUE)
281
- }
282
- readr::write_csv(merged_df_all, file.path(outdir, "merged_df_ALL_data.csv"))
283
- }
284
-
285
- # Aggregate counts by day
286
  counts_by_day <- merged_df_all %>%
287
- mutate(obs_date = as.Date(`observed_on`)) %>%
288
  group_by(Window, obs_date) %>%
289
  summarise(n = n_distinct(id), .groups = "drop")
290
-
291
  y_max_value <- max(counts_by_day$n, na.rm = TRUE)
292
- n_windows <- length(unique(counts_by_day$Window))
293
- wes_colors <- wes_palette("Zissou1", n_windows, type = "discrete")
294
-
295
- # Daily line plot
296
  daily_plot <- ggplot(counts_by_day, aes(x = obs_date, y = n, color = Window)) +
297
  geom_line(size = 1.2) +
298
  geom_point(size = 2) +
299
- # scale_color_viridis_d() +
300
- scale_x_date(date_labels = "%b", date_breaks = "1 month") +
301
  scale_y_continuous(limits = c(0, y_max_value)) +
302
  labs(
303
- title = glue("Daily 'Dead' Observations (Years {paste(years, collapse=', ')})"),
304
- x = "Month",
305
  y = "Number of Observations",
306
  color = "Year"
307
  ) +
308
  theme_minimal(base_size = 14) +
309
  theme(axis.text.x = element_text(angle = 45, hjust = 1))
310
-
311
- # Top species bar plot
312
  if ("taxon.name" %in% names(merged_df_all)) {
313
  species_counts <- merged_df_all %>%
314
  filter(!is.na(taxon.name)) %>%
315
  group_by(Window, taxon.name) %>%
316
  summarise(dead_count = n(), .groups = "drop")
317
-
318
  top_species_overall <- species_counts %>%
319
  group_by(taxon.name) %>%
320
  summarise(total_dead = sum(dead_count)) %>%
321
  arrange(desc(total_dead)) %>%
322
  slice_head(n = 20)
323
-
324
  species_top20 <- species_counts %>%
325
  filter(taxon.name %in% top_species_overall$taxon.name)
326
-
327
  top_species_plot <- ggplot(species_top20, aes(
328
  x = reorder(taxon.name, -dead_count),
329
  y = dead_count,
@@ -331,8 +264,6 @@ getDeadVertebrates_weeklyLoop <- function(
331
  )) +
332
  geom_col(position = position_dodge(width = 0.7)) +
333
  coord_flip() +
334
- # scale_fill_manual(values = wes_colors) +
335
- # scale_color_viridis_d() +
336
  labs(
337
  title = "Top 20 Species with 'Dead' Observations",
338
  x = "Species",
@@ -345,71 +276,51 @@ getDeadVertebrates_weeklyLoop <- function(
345
  labs(title = "No 'taxon.name' column found", x = NULL, y = NULL) +
346
  theme_void()
347
  }
348
-
349
- # Identify "high mortality" days (>= 90th percentile)
350
  daily_quantile <- quantile(counts_by_day$n, probs = 0.90, na.rm = TRUE)
351
  high_mortality_days <- counts_by_day %>%
352
  filter(n >= daily_quantile) %>%
353
  pull(obs_date)
354
-
355
  merged_high <- merged_df_all %>%
356
- mutate(obs_date = as.Date(`observed_on`)) %>%
357
  filter(obs_date %in% high_mortality_days)
358
-
359
- # Map of top-90% mortality days
360
- if ("location" %in% names(merged_high)) {
361
- location_df <- merged_high %>%
362
  filter(!is.na(location) & location != "") %>%
363
  separate(location, into = c("lat_str", "lon_str"), sep = ",", remove = FALSE) %>%
364
- mutate(
365
- latitude = as.numeric(lat_str),
366
- longitude = as.numeric(lon_str)
367
- )
368
-
369
- if (nrow(location_df) == 0) {
370
  map_hotspots_gg <- ggplot() +
371
- labs(title = "No data in top 90th percentile days with valid location") +
372
  theme_void()
373
  } else {
374
- min_lon <- min(location_df$longitude, na.rm = TRUE)
375
- max_lon <- max(location_df$longitude, na.rm = TRUE)
376
- min_lat <- min(location_df$latitude, na.rm = TRUE)
377
- max_lat <- max(location_df$latitude, na.rm = TRUE)
378
-
379
- map_hotspots_gg <- ggplot(location_df, aes(x = longitude, y = latitude, color = Window)) +
380
  borders("world", fill = "gray80", colour = "white") +
381
- geom_point(alpha = 0.6, size = 2) +
382
- # scale_color_viridis_d() +
383
- coord_quickmap(
384
- xlim = c(min_lon, max_lon),
385
- ylim = c(min_lat, max_lat),
386
- expand = TRUE
387
  ) +
 
 
388
  labs(
389
- title = glue("Top 90th percentile mortality days ({paste(years, collapse=', ')})"),
390
- x = "Longitude",
391
- y = "Latitude",
392
- color = "Year"
393
  ) +
394
- theme_minimal(base_size = 14)
395
  }
396
  } else {
397
  map_hotspots_gg <- ggplot() +
398
- labs(title = "No 'location' column for top 90% days map") +
399
  theme_void()
400
  }
401
-
402
- # Optionally save outputs
403
- if (!is.null(outdir)) {
404
- readr::write_csv(merged_high, file.path(outdir, "merged_df_top90.csv"))
405
- ggsave(file.path(outdir, "daily_plot.png"),
406
- daily_plot, width = 8, height = 5, dpi = 300)
407
- ggsave(file.path(outdir, "top_species_plot.png"),
408
- top_species_plot, width = 7, height = 7, dpi = 300)
409
- ggsave(file.path(outdir, "map_hotspots.png"),
410
- map_hotspots_gg, width = 8, height = 5, dpi = 300)
411
- }
412
-
413
  return(list(
414
  merged_df_all = merged_df_all,
415
  merged_df = merged_high,
@@ -419,353 +330,346 @@ getDeadVertebrates_weeklyLoop <- function(
419
  daily_90th_quant = daily_quantile
420
  ))
421
  }
422
-
423
- ##################################################################
424
- # 3) Shiny App: UI + Server (Weekly Queries)
425
- ##################################################################
426
-
427
  ui <- fluidPage(
428
- theme = shinytheme("cosmo"), # Use a professional theme from shinythemes
429
-
430
- # -- Logo and Title at the top --
431
  fluidRow(
432
- column(
433
- width = 2,
434
- tags$img(src = "www/all_logos.png", height = "400px")
435
- ),
436
- column(
437
- width = 10,
438
- titlePanel("Dead Wildlife Observations from iNaturalist")
439
- )
440
  ),
441
  hr(),
442
-
443
  sidebarLayout(
444
  sidebarPanel(
445
- tabsetPanel(
446
- id = "sidebar_tabs",
447
-
448
- # == Query Panel ==
449
- tabPanel(
450
- title = "Query",
451
- br(),
452
- radioButtons("region_mode", "Region Input Mode:",
453
- choices = c("Enter Numeric place_id" = "place",
454
- "Two-Click Bounding Box" = "bbox"),
455
- # choices = c(
456
- # "Two-Click Bounding Box" = "bbox"),
457
- selected = "bbox"),
458
-
459
- # If user chooses numeric "place_id"
460
- conditionalPanel(
461
- condition = "input.region_mode == 'place'",
462
- numericInput("place_id",
463
- "Numeric place_id (e.g. 1 = USA, 6712 = Canada, 14 = California)",
464
- value = 1, min = 1, max = 999999, step = 1)
465
- ),
466
-
467
- # If user chooses bounding box
468
- conditionalPanel(
469
- condition = "input.region_mode == 'bbox'",
470
- helpText("Left-click once for the SW corner, once more for the NE corner."),
471
- leafletOutput("map_two_click", height = "300px"),
472
- br(),
473
- actionButton("clear_bbox", "Clear bounding box"),
474
- br(), br(),
475
- verbatimTextOutput("bbox_coords")
476
- ),
477
-
478
- # Years
479
- checkboxGroupInput("years", "Select Year(s):",
480
- choices = 2018:2025,
481
- selected = c(2022, 2023)),
482
-
483
- # Query by iconic class or exact species
484
- radioButtons("query_type", "Query By:",
485
- choices = c("Taxon Class" = "iconic",
486
- "Exact Species Name" = "species")),
487
- conditionalPanel(
488
- condition = "input.query_type == 'iconic'",
489
- selectInput("iconic_taxon", "Select Taxon Class:",
490
- choices = c("Aves", "Mammalia", "Reptilia", "Amphibia",
491
- "Actinopterygii", "Mollusca", "Animalia"),
492
- selected = "Aves")
493
- ),
494
- conditionalPanel(
495
- condition = "input.query_type == 'species'",
496
- textInput("species_name", "Enter exact species name (e.g. Puma concolor)", "")
497
- ),
498
-
499
- actionButton("run_query", "Run Query", icon = icon("play")),
500
- hr(),
501
- downloadButton("downloadTop90", "Download Top-90% CSV", icon = icon("download")),
502
- br(), br(),
503
- downloadButton("downloadAll", "Download ALL Data CSV", icon = icon("download"))
504
- ),
505
-
506
- # == About Panel ==
507
- tabPanel(
508
- title = "About",
509
- br(),
510
- p("This Shiny application was created by Diego Ellis Soto (UC Berkeley).
511
- It queries iNaturalist for observations that have been annotated as 'Dead' wildlife (term_id=17, term_value_id=19).
512
- The data is fetched via the iNaturalist API and summarized here for scientific or conservation purposes.")
513
- ),
514
-
515
- # == Participatory Science Panel ==
516
- tabPanel(
517
- title = "Participatory Science",
518
- br(),
519
- p("Citizen science platforms like iNaturalist allow everyday people to collect and share data about local biodiversity.
520
- Recording observations of dead wildlife can help track mortality events, disease spread, and other factors affecting animal populations."),
521
- p("We encourage everyone to contribute their sightings responsibly, ensuring that any data on roadkill or other mortalities can help conservation efforts and
522
- raise public awareness.")
523
- ),
524
-
525
- # == How To Use Panel ==
526
- tabPanel(
527
- title = "How to Use",
528
- br(),
529
- p("This application lets you retrieve data about dead wildlife observations from iNaturalist.
530
- You can choose to manually provide a numeric place_id or define a custom bounding box by clicking twice on the map."),
531
- p("You can also decide whether to query by taxon class (e.g. Aves) or by exact species name (e.g. Puma concolor)."),
532
- p("After selecting your inputs, press 'Run Query.' Two separate CSV downloads are provided: (1) for all data retrieved, and (2) for only the top-90% mortality days (for hotspot analysis).")
533
- )
534
  )
535
  ),
536
-
537
  mainPanel(
538
  tabsetPanel(
539
- tabPanel("Daily Time Series", withSpinner(plotOutput("dailyPlot"), type = 6)),
540
- tabPanel("Top Species", withSpinner(plotOutput("speciesPlot"), type = 6)),
541
- tabPanel("Hotspots Map (90th%)", withSpinner(plotOutput("hotspotMap"), type = 6)),
542
- tabPanel("Data Table (Top-90%)", withSpinner(DT::dataTableOutput("dataTable"), type = 6))
543
  )
544
  )
545
  )
546
  )
547
-
548
  server <- function(input, output, session) {
549
-
550
- # Reactive values for bounding box corners
551
- rv <- reactiveValues(
552
- corner1 = NULL,
553
- corner2 = NULL,
554
- bbox = NULL
555
- )
556
-
557
- # Initialize map
558
- output$map_two_click <- renderLeaflet({
559
- leaflet() %>%
560
- addTiles() %>%
561
- setView(lng = -100, lat = 40, zoom = 4)
562
  })
563
-
564
- # Handle bounding box clicks
565
- observeEvent(input$map_two_click_click, {
566
- req(input$region_mode == "bbox")
567
-
568
- click <- input$map_two_click_click
569
- if (is.null(click)) return()
570
-
571
- lat_clicked <- click$lat
572
- lng_clicked <- click$lng
573
-
574
- if (is.null(rv$corner1)) {
575
- rv$corner1 <- c(lat_clicked, lng_clicked)
576
- showNotification("First corner set. Now click for the opposite corner.")
577
-
578
- leafletProxy("map_two_click") %>%
579
- clearMarkers() %>%
580
- addMarkers(lng = lng_clicked, lat = lat_clicked, popup = "Corner 1")
581
-
582
- rv$corner2 <- NULL
583
- rv$bbox <- NULL
584
-
585
- } else {
586
- rv$corner2 <- c(lat_clicked, lng_clicked)
587
-
588
- lat_min <- min(rv$corner1[1], rv$corner2[1])
589
- lat_max <- max(rv$corner1[1], rv$corner2[1])
590
- lng_min <- min(rv$corner1[2], rv$corner2[2])
591
- lng_max <- max(rv$corner1[2], rv$corner2[2])
592
-
593
- rv$bbox <- c(lat_min, lng_min, lat_max, lng_max)
594
-
595
- showNotification("Second corner set. Bounding box defined!", duration = 2)
596
-
597
- leafletProxy("map_two_click") %>%
598
- clearMarkers() %>%
599
- addMarkers(lng = rv$corner1[2], lat = rv$corner1[1], popup = "Corner 1") %>%
600
- addMarkers(lng = rv$corner2[2], lat = rv$corner2[1], popup = "Corner 2") %>%
601
- clearShapes() %>%
602
- addRectangles(
603
- lng1 = lng_min, lat1 = lat_min,
604
- lng2 = lng_max, lat2 = lat_max,
605
- fillColor = "red", fillOpacity = 0.2,
606
- color = "red"
607
- )
608
  }
609
  })
610
-
611
  observeEvent(input$clear_bbox, {
612
- rv$corner1 <- NULL
613
- rv$corner2 <- NULL
614
- rv$bbox <- NULL
615
-
616
- leafletProxy("map_two_click") %>%
617
- clearMarkers() %>%
618
- clearShapes()
619
  })
620
-
621
- output$bbox_coords <- renderText({
622
- req(input$region_mode == "bbox")
623
-
624
- if (is.null(rv$bbox)) {
625
- "No bounding box defined yet."
626
- } else {
627
- paste0(
628
- "Bounding box:\n",
629
- "SW corner: (", rv$bbox[1], ", ", rv$bbox[2], ")\n",
630
- "NE corner: (", rv$bbox[3], ", ", rv$bbox[4], ")"
631
- )
632
  }
633
  })
634
-
635
- # Store final query results
 
 
636
  result_data <- reactiveVal(NULL)
637
-
638
- # Main "Run Query" button
639
  observeEvent(input$run_query, {
640
- req(input$years)
641
- shiny::validate(need(length(input$years) > 0, "Please select at least one year."))
642
-
643
- yrs <- as.numeric(input$years)
644
-
645
- # Region logic
646
- place_id_val <- NULL
647
- swlat_val <- NULL
648
- swlng_val <- NULL
649
- nelat_val <- NULL
650
- nelng_val <- NULL
651
-
652
- if (input$region_mode == "place") {
653
- place_id_val <- input$place_id
654
- } else {
655
- shiny::validate(need(!is.null(rv$bbox), "Please click twice on the map to define bounding box."))
656
- swlat_val <- rv$bbox[1]
657
- swlng_val <- rv$bbox[2]
658
- nelat_val <- rv$bbox[3]
659
- nelng_val <- rv$bbox[4]
660
- }
661
-
662
- # Query type logic
663
- iconic_val <- NULL
664
- species_val <- NULL
665
- if (input$query_type == "iconic") {
666
- iconic_val <- input$iconic_taxon
667
  } else {
668
- species_val <- input$species_name
669
- }
670
-
671
- # Fetch data
672
- withProgress(message = 'Fetching data from iNaturalist (Weekly)...', value = 0, {
673
- incProgress(0.4)
674
-
675
- query_res <- getDeadVertebrates_weeklyLoop(
676
- years = yrs,
677
- place_id = place_id_val,
678
- swlat = swlat_val,
679
- swlng = swlng_val,
680
- nelat = nelat_val,
681
- nelng = nelng_val,
682
- iconic_taxa = iconic_val,
683
- taxon_name = species_val
684
  )
685
-
686
  result_data(query_res)
687
- incProgress(1)
688
- })
689
- })
690
-
691
- # Output plots
692
- output$dailyPlot <- renderPlot({
693
- req(result_data())
694
- result_data()$daily_plot
695
- })
696
-
697
- output$speciesPlot <- renderPlot({
698
- req(result_data())
699
- result_data()$top_species_plot
700
  })
701
-
702
- output$hotspotMap <- renderPlot({
 
703
  req(result_data())
704
- result_data()$map_hotspots_gg
705
  })
706
-
707
- # Output data table (top-90% subset)
 
708
  output$dataTable <- DT::renderDataTable({
709
  req(result_data())
710
- df <- result_data()$merged_df # top 90% subset
711
-
712
  if (nrow(df) == 0) {
713
- return(DT::datatable(
714
- data.frame(Message = "No records found"),
715
- options = list(pageLength = 20) # Show 20 records
716
- ))
717
  }
718
-
719
- df <- df %>%
720
- mutate(
721
- inat_link = paste0(
722
- "<a href='https://www.inaturalist.org/observations/",
723
- id, "' target='_blank'>", id, "</a>"
724
- )
725
- )
726
-
727
- photo_col <- "taxon.default_photo.square_url"
728
- if (photo_col %in% names(df)) {
729
- df$image_thumb <- ifelse(
730
- !is.na(df[[photo_col]]) & df[[photo_col]] != "",
731
- paste0("<img src='", df[[photo_col]], "' width='50'/>"),
732
- "No Img"
733
- )
734
  } else {
735
- df$image_thumb <- "No Img"
736
  }
737
-
738
  show_cols <- c(
739
- "inat_link", "image_thumb", "taxon.name", "created_at_details.date",
740
- setdiff(names(df), c("inat_link", "image_thumb", "taxon.name", "created_at_details.date"))
741
- )
742
-
743
- DT::datatable(
744
- df[, show_cols, drop = FALSE],
745
- escape = FALSE,
746
- options = list(pageLength = 20, autoWidth = TRUE) # Page 50 prior
747
  )
 
 
748
  })
749
-
750
- # Download handlers
751
- output$downloadTop90 <- downloadHandler(
752
- filename = function() {
753
- paste0("inat_dead_top90_", Sys.Date(), ".csv")
754
- },
755
- content = function(file) {
756
- req(result_data())
757
- readr::write_csv(result_data()$merged_df, file)
758
- }
759
- )
760
-
761
  output$downloadAll <- downloadHandler(
762
- filename = function() {
763
- paste0("inat_dead_ALL_", Sys.Date(), ".csv")
764
- },
765
- content = function(file) {
766
- req(result_data())
767
- readr::write_csv(result_data()$merged_df_all, file)
768
- }
769
  )
770
  }
771
 
 
1
  ##################################################################
2
+ # Dead Wildlife iNaturalist Shiny App (Professional Version: Live/Archive modes, All Data table fixed)
 
3
  ##################################################################
4
 
5
+ # Install and load required packages
6
  required_packages <- c(
7
  "httr", "jsonlite", "tidyverse", "glue", "lubridate",
8
+ "wesanderson", "viridis", "hexbin", "shinycssloaders",
9
  "DT", "maps", "mapdata", "leaflet", "leaflet.extras",
10
+ "shinythemes", "shiny", "arrow"
11
  )
 
12
  installed_packages <- rownames(installed.packages())
13
  for (pkg in required_packages) {
14
+ if (!pkg %in% installed_packages) install.packages(pkg, dependencies = TRUE)
 
 
15
  }
 
16
  library(httr)
17
  library(jsonlite)
18
  library(tidyverse)
 
20
  library(lubridate)
21
  library(wesanderson)
22
  library(viridis)
23
+ library(hexbin)
24
  library(shinycssloaders)
25
  library(DT)
26
  library(maps)
 
29
  library(leaflet.extras)
30
  library(shinythemes)
31
  library(shiny)
32
+ library(arrow)
33
+
34
+ # Path to your archived parquet file (update if needed)
35
+ # parquet_path <- "~/inat_all_Apr122025.parquet"
36
+ parquet_path <- "https://huggingface.co/datasets/diegoellissoto/iNaturalist_mortality_records_12Apr2025/resolve/main/inat_all_Apr122025.parquet"
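If reading the parquet snapshot straight from this URL proves slow or unsupported by the installed arrow version, a cached local copy is an alternative; a minimal sketch, with the cache location as an assumption (not part of this commit):

local_parquet <- file.path(tempdir(), "inat_all_Apr122025.parquet")
if (!file.exists(local_parquet)) {
  # Download the snapshot once per session, then read it from disk
  download.file(parquet_path, destfile = local_parquet, mode = "wb")
}
inat_snapshot <- arrow::read_parquet(local_parquet)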
37
+ # ------------------- Helper Functions ---------------------
38
+ make_daily_plot <- function(df, start_date, end_date) {
39
+ if (!"observed_on" %in% names(df)) return(ggplot() + theme_void() + labs(title = "No date info"))
40
+ if (nrow(df) == 0) return(ggplot() + theme_void() + labs(title = "No data"))
41
+ df <- df %>%
42
+ mutate(obs_date = as.Date(observed_on),
43
+ Window = format(obs_date, "%Y")) %>%
44
+ filter(!is.na(obs_date))
45
+ counts_by_day <- df %>%
46
+ group_by(Window, obs_date) %>%
47
+ summarise(n = n_distinct(id), .groups = "drop")
48
+ y_max_value <- max(counts_by_day$n, na.rm = TRUE)
49
+ ggplot(counts_by_day, aes(x = obs_date, y = n, color = Window)) +
50
+ geom_line(size = 1.2) +
51
+ geom_point(size = 2) +
52
+ scale_x_date(date_labels = "%b %d", date_breaks = "1 month") +
53
+ scale_y_continuous(limits = c(0, y_max_value)) +
54
+ labs(
55
+ title = glue("Daily 'Dead' Observations ({start_date} to {end_date})"),
56
+ x = "Date",
57
+ y = "Number of Observations",
58
+ color = "Year"
59
+ ) +
60
+ theme_minimal(base_size = 14) +
61
+ theme(axis.text.x = element_text(angle = 45, hjust = 1))
62
+ }
63
 
64
+ make_top_species_plot <- function(df) {
65
+ if (!"scientific_name" %in% names(df)) return(ggplot() + theme_void() + labs(title = "No species info"))
66
+ if (nrow(df) == 0) return(ggplot() + theme_void() + labs(title = "No data"))
67
+ df <- df %>%
68
+ mutate(obs_date = as.Date(observed_on),
69
+ Window = format(obs_date, "%Y"))
70
+ species_counts <- df %>%
71
+ filter(!is.na(scientific_name)) %>%
72
+ group_by(Window, scientific_name) %>%
73
+ summarise(dead_count = n(), .groups = "drop")
74
+ top_species_overall <- species_counts %>%
75
+ group_by(scientific_name) %>%
76
+ summarise(total_dead = sum(dead_count)) %>%
77
+ arrange(desc(total_dead)) %>%
78
+ slice_head(n = 20)
79
+ species_top20 <- species_counts %>%
80
+ filter(scientific_name %in% top_species_overall$scientific_name)
81
+ ggplot(species_top20, aes(
82
+ x = reorder(scientific_name, -dead_count),
83
+ y = dead_count,
84
+ fill= Window
85
+ )) +
86
+ geom_col(position = position_dodge(width = 0.7)) +
87
+ coord_flip() +
88
+ labs(
89
+ title = "Top 20 Species with 'Dead' Observations",
90
+ x = "Species",
91
+ y = "Number of Dead Observations",
92
+ fill = "Year"
93
+ ) +
94
+ theme_minimal(base_size = 14)
95
+ }
96
 
97
+ make_hexbin_map <- function(df, start_date, end_date) {
98
+ if (!("latitude" %in% names(df) && "longitude" %in% names(df))) {
99
+ return(ggplot() + labs(title = "No spatial data available for map") + theme_void())
100
+ }
101
+ df <- df %>% filter(!is.na(latitude) & !is.na(longitude))
102
+ if (nrow(df) == 0) {
103
+ return(ggplot() + labs(title = "No spatial data available for map") + theme_void())
104
+ }
105
+ x_limits <- range(df$longitude, na.rm = TRUE)
106
+ y_limits <- range(df$latitude, na.rm = TRUE)
107
+ ggplot() +
108
+ borders("world", fill = "gray80", colour = "white") +
109
+ stat_bin_hex(
110
+ data = df,
111
+ aes(x = longitude, y = latitude),
112
+ bins = 500,
113
+ color = "black",
114
+ alpha = 0.8
115
+ ) +
116
+ # scale_fill_viridis_c(option = "plasma", name = "Observation Count") +
117
+ scale_fill_viridis_c(option = "magma", name = "Obs. Count", trans="sqrt") +
118
+ coord_quickmap(xlim = x_limits, ylim = y_limits, expand = TRUE) +
119
+ labs(
120
+ title = glue("'Dead' Wildlife Hexbin Map ({start_date} to {end_date})"),
121
+ x = "Longitude",
122
+ y = "Latitude"
123
+ ) +
124
+ theme_classic(base_size = 14) +
125
+ theme(
126
+ axis.text = element_text(face = "bold", size = 14, colour = "black"),
127
+ axis.title = element_text(face = "bold", size = 16, colour = "black")
128
+ )
129
+ }
130
+
131
+ get_high_mortality_days <- function(df) {
132
+ if (!"observed_on" %in% names(df)) return(NULL)
133
+ df <- df %>% mutate(obs_date = as.Date(observed_on))
134
+ counts_by_day <- df %>%
135
+ group_by(obs_date) %>%
136
+ summarise(n = n_distinct(id), .groups = "drop")
137
+ if (nrow(counts_by_day) == 0) return(NULL)
138
+ daily_quantile <- quantile(counts_by_day$n, probs = 0.90, na.rm = TRUE)
139
+ high_days <- counts_by_day %>% filter(n >= daily_quantile) %>% pull(obs_date)
140
+ list(days = high_days, quant = daily_quantile)
141
+ }
142
+
143
+ # -- API Fetch/Progress bar logic (Live mode) --
144
  fetch_dead_data_once <- function(
145
+ swlat, swlng, nelat, nelng,
146
+ start_date, end_date,
147
+ iconic_taxa = NULL, taxon_name = NULL,
148
+ per_page = 200, max_pages = 200, progress = NULL
 
149
  ) {
150
  base_url <- "https://api.inaturalist.org/v1/observations"
 
151
  q_parts <- list(
152
+ "term_id=17", "term_value_id=19", "verifiable=true",
153
+ glue("d1={start_date}"), glue("d2={end_date}"),
154
+ "order=desc", "order_by=created_at", glue("per_page={per_page}")
155
  )
156
+ if (!is.null(iconic_taxa) && iconic_taxa != "") q_parts <- c(q_parts, glue("iconic_taxa={iconic_taxa}"))
157
+ if (!is.null(taxon_name) && taxon_name != "") q_parts <- c(q_parts, glue("taxon_name={URLencode(taxon_name)}"))
 
158
  query_params <- paste(q_parts, collapse = "&")
159
+ loc_part <- glue("&nelat={nelat}&nelng={nelng}&swlat={swlat}&swlng={swlng}")
160
  observations_list <- list()
161
  current_page <- 1
 
162
  while (current_page <= max_pages) {
163
+ if (!is.null(progress)) progress$set(detail = glue("API page {current_page}"), value = NULL)
164
+ query_url <- paste0(base_url, "?", query_params, "&page=", current_page, loc_part)
165
  resp <- GET(query_url)
166
  if (http_error(resp)) {
167
  warning("HTTP error on page ", current_page, ": ", status_code(resp))
168
  break
169
  }
 
170
  parsed <- content(resp, as = "text", encoding = "UTF-8") %>%
171
  fromJSON(flatten = TRUE)
172
+ if (length(parsed$results) == 0) break
173
  obs_page_df <- as_tibble(parsed$results)
174
  observations_list[[current_page]] <- obs_page_df
175
+ if (nrow(obs_page_df) < per_page) break
176
  current_page <- current_page + 1
177
+ Sys.sleep(1.4)
178
  }
179
+ bind_rows(observations_list)
 
 
180
  }
181
 
182
+ getDeadVertebrates_dateRange <- function(
183
+ start_date, end_date,
184
+ swlat, swlng, nelat, nelng,
185
+ iconic_taxa = NULL, taxon_name = NULL,
186
+ per_page = 500, max_pages = 500,
187
+ .shiny_progress = NULL
188
  ) {
189
+ start_date <- as.Date(start_date)
190
+ end_date <- as.Date(end_date)
191
+ week_starts <- seq.Date(start_date, end_date, by = "1 week")
192
+ all_weeks_list <- list()
193
  for (i in seq_along(week_starts)) {
194
+ st <- week_starts[i]
195
+ ed <- if (i < length(week_starts)) week_starts[i + 1] - 1 else end_date
196
+ if (!is.null(.shiny_progress)) {
197
+ .shiny_progress$set(
198
+ value = (i-1)/length(week_starts),
199
+ message = glue("Live Query: Fetching week {i} of {length(week_starts)}"),
200
+ detail = glue("Dates: {st} to {ed}")
201
+ )
202
  }
203
  df_week <- fetch_dead_data_once(
204
+ swlat, swlng, nelat, nelng,
205
+ start_date = st, end_date = ed,
206
+ iconic_taxa = iconic_taxa, taxon_name = taxon_name,
207
+ per_page = per_page, max_pages = max_pages,
208
+ progress = .shiny_progress
209
  )
210
+ all_weeks_list[[i]] <- df_week
211
+ Sys.sleep(1.4)
212
  }
213
+ merged_df_all <- bind_rows(all_weeks_list)
214
+ # Everything below is unchanged from previous versions (plot construction, high-mortality days, etc.)
215
+ if (!"created_at_details.date" %in% names(merged_df_all) || nrow(merged_df_all) == 0) {
216
+ placeholder_plot <- function(title) {
217
+ ggplot() + labs(title = title, x = NULL, y = NULL) + theme_void()
218
+ }
219
  return(list(
220
  merged_df_all = merged_df_all,
221
  merged_df = merged_df_all,
222
+ daily_plot = placeholder_plot("No 'Dead' Observations Found"),
223
+ top_species_plot = placeholder_plot("No species data"),
224
+ map_hotspots_gg = placeholder_plot("No data for map"),
225
  daily_90th_quant = NA
226
  ))
227
  }
228
+ merged_df_all <- merged_df_all %>%
229
+ mutate(obs_date = as.Date(observed_on),
230
+ Window = format(obs_date, "%Y"))
231
  counts_by_day <- merged_df_all %>%
 
232
  group_by(Window, obs_date) %>%
233
  summarise(n = n_distinct(id), .groups = "drop")
 
234
  y_max_value <- max(counts_by_day$n, na.rm = TRUE)
 
 
 
 
235
  daily_plot <- ggplot(counts_by_day, aes(x = obs_date, y = n, color = Window)) +
236
  geom_line(size = 1.2) +
237
  geom_point(size = 2) +
238
+ scale_x_date(date_labels = "%b %d", date_breaks = "1 month") +
 
239
  scale_y_continuous(limits = c(0, y_max_value)) +
240
  labs(
241
+ title = glue("Daily 'Dead' Observations ({start_date} to {end_date})"),
242
+ x = "Date",
243
  y = "Number of Observations",
244
  color = "Year"
245
  ) +
246
  theme_minimal(base_size = 14) +
247
  theme(axis.text.x = element_text(angle = 45, hjust = 1))
 
 
248
  if ("taxon.name" %in% names(merged_df_all)) {
249
  species_counts <- merged_df_all %>%
250
  filter(!is.na(taxon.name)) %>%
251
  group_by(Window, taxon.name) %>%
252
  summarise(dead_count = n(), .groups = "drop")
 
253
  top_species_overall <- species_counts %>%
254
  group_by(taxon.name) %>%
255
  summarise(total_dead = sum(dead_count)) %>%
256
  arrange(desc(total_dead)) %>%
257
  slice_head(n = 20)
 
258
  species_top20 <- species_counts %>%
259
  filter(taxon.name %in% top_species_overall$taxon.name)
 
260
  top_species_plot <- ggplot(species_top20, aes(
261
  x = reorder(taxon.name, -dead_count),
262
  y = dead_count,
 
264
  )) +
265
  geom_col(position = position_dodge(width = 0.7)) +
266
  coord_flip() +
 
 
267
  labs(
268
  title = "Top 20 Species with 'Dead' Observations",
269
  x = "Species",
 
276
  labs(title = "No 'taxon.name' column found", x = NULL, y = NULL) +
277
  theme_void()
278
  }
 
 
279
  daily_quantile <- quantile(counts_by_day$n, probs = 0.90, na.rm = TRUE)
280
  high_mortality_days <- counts_by_day %>%
281
  filter(n >= daily_quantile) %>%
282
  pull(obs_date)
 
283
  merged_high <- merged_df_all %>%
 
284
  filter(obs_date %in% high_mortality_days)
285
+ if ("location" %in% names(merged_df_all)) {
286
+ location_df_all <- merged_df_all %>%
 
 
287
  filter(!is.na(location) & location != "") %>%
288
  separate(location, into = c("lat_str", "lon_str"), sep = ",", remove = FALSE) %>%
289
+ mutate(latitude = as.numeric(lat_str), longitude = as.numeric(lon_str))
290
+ if (nrow(location_df_all) == 0) {
291
  map_hotspots_gg <- ggplot() +
292
+ labs(title = "No spatial data available for map") +
293
  theme_void()
294
  } else {
295
+ x_limits <- range(location_df_all$longitude, na.rm = TRUE)
296
+ y_limits <- range(location_df_all$latitude, na.rm = TRUE)
297
+ map_hotspots_gg <- ggplot() +
 
 
 
298
  borders("world", fill = "gray80", colour = "white") +
299
+ stat_bin_hex(
300
+ data = location_df_all,
301
+ aes(x = longitude, y = latitude),
302
+ bins = 500,
303
+ color = "black",
304
+ alpha = 0.8
305
  ) +
306
+ scale_fill_viridis_c(option = "plasma", name = "Observation Count") +
307
+ coord_quickmap(xlim = x_limits, ylim = y_limits, expand = TRUE) +
308
  labs(
309
+ title = glue("'Dead' Wildlife Hexbin Map ({start_date} to {end_date})"),
310
+ x = "Longitude",
311
+ y = "Latitude"
 
312
  ) +
313
+ theme_classic(base_size = 14) +
314
+ theme(
315
+ axis.text = element_text(face = "bold", size = 14, colour = "black"),
316
+ axis.title = element_text(face = "bold", size = 16, colour = "black")
317
+ )
318
  }
319
  } else {
320
  map_hotspots_gg <- ggplot() +
321
+ labs(title = "No 'location' column for map") +
322
  theme_void()
323
  }
324
  return(list(
325
  merged_df_all = merged_df_all,
326
  merged_df = merged_high,
 
330
  daily_90th_quant = daily_quantile
331
  ))
332
  }
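For reference, a minimal non-interactive call to getDeadVertebrates_dateRange() as defined above; the bounding box, dates, and taxon are illustrative assumptions, not part of this commit:

res <- getDeadVertebrates_dateRange(
  start_date = "2024-01-01", end_date = "2024-01-14",
  swlat = 37.2, swlng = -122.6, nelat = 38.1, nelng = -121.7,  # roughly the San Francisco Bay Area
  iconic_taxa = "Aves"
)
nrow(res$merged_df_all)  # number of records fetched
res$daily_plot           # ggplot of daily counts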
333
+ # ------------------------ UI --------------------------
334
  ui <- fluidPage(
335
+ theme = shinytheme("cosmo"),
 
 
336
  fluidRow(
337
+ column(width = 2, tags$img(src = "www/all_logos.png", height = "400px")),
338
+ column(width = 10, titlePanel("Dead Wildlife Observations from iNaturalist"))
339
  ),
340
  hr(),
 
341
  sidebarLayout(
342
  sidebarPanel(
343
+ tabsetPanel(id = "sidebar_tabs",
344
+ tabPanel("Query",
345
+ br(),
346
+ radioButtons(
347
+ "data_source", "Data Source:",
348
+ choices = c("Download Live from iNaturalist" = "live", "Archived Parquet File" = "archived"),
349
+ selected = "live"
350
+ ),
351
+ tags$div(
352
+ style="margin-bottom:8px;",
353
+ textInput("region_search", "Find place (type and click Search)", value = "", placeholder = "e.g. California, Uruguay, Yellowstone"),
354
+ actionButton("region_search_btn", "Search", icon=icon("search"))
355
+ ),
356
+ tags$div(
357
+ style = "margin-bottom:10px;",
358
+ leafletOutput("select_map", height = "340px"),
359
+ actionButton("clear_bbox", "Clear Bounding Box", icon = icon("eraser")),
360
+ helpText("Draw a rectangle or search for a place. Only one region (rectangle) is used at a time.")
361
+ ),
362
+ verbatimTextOutput("bbox_coords"),
363
+ dateRangeInput("date_range", "Select Date Range:",
364
+ start = Sys.Date() - 365,
365
+ end = Sys.Date(),
366
+ min = "2010-01-01",
367
+ max = Sys.Date()),
368
+ radioButtons("query_type", "Query By:",
369
+ choices = c("Taxon Class" = "iconic", "Exact Species Name" = "species")),
370
+ conditionalPanel(
371
+ condition = "input.query_type == 'iconic'",
372
+ selectInput("iconic_taxon", "Select Taxon Class:",
373
+ choices = c("Aves", "Mammalia", "Reptilia", "Amphibia", "Actinopterygii", "Mollusca", "Animalia"),
374
+ selected = "Aves")
375
+ ),
376
+ conditionalPanel(
377
+ condition = "input.query_type == 'species'",
378
+ textInput("species_name", "Enter exact species name (e.g. Puma concolor)", "")
379
+ ),
380
+ actionButton("run_query", "Run Query", icon = icon("play")),
381
+ hr(),
382
+ downloadButton("downloadAll", "Download ALL Data CSV", icon = icon("download"))
383
+ ),
384
+ tabPanel("About",
385
+ tags$h3("iNaturalist, Dead Wildlife, and Participatory Science"),
386
+ tags$p("iNaturalist is a global biodiversity platform powered by a vibrant community of naturalists, scientists, students, and citizens. Its open data and easy smartphone app allow anyone to record nature and contribute to science."),
387
+ tags$h4("Why observe dead wildlife?"),
388
+ tags$ul(
389
+ tags$li("Track disease outbreaks and mass die-offs (e.g. avian influenza, amphibian disease)."),
390
+ tags$li("Identify human-wildlife conflicts (e.g. roadkill, window strikes)."),
391
+ tags$li("Detect range shifts and rare events."),
392
+ tags$li("Monitor mortality of threatened or sensitive species.")
393
+ ),
394
+ tags$p("Documenting dead wildlife—even if unpleasant—can save species by detecting threats early."),
395
+ tags$h4("About this App"),
396
+ tags$p("This app was created by Diego Ellis-Soto (UC Berkeley) and colleagues to empower rapid, open exploration of wildlife mortality patterns worldwide. It is open source and intended for research, conservation, and education."),
397
+ tags$blockquote("Ellis-Soto D., Taylor L., Edson E., Schell C., Boettiger C., Johnson R. (2024). Global, near real-time ecological forecasting of mortality events through participatory science
398
+
399
+ https://github.com/diego-ellis-soto/iNat_mortality_detector"),
400
+ tags$h4("Technical Info"),
401
+ tags$ul(
402
+ tags$li("iNaturalist API v1 (Live Mode) and Parquet snapshot (Archive Mode).")
403
+ ),
404
+ tags$h4("FAQ"),
405
+ tags$dl(
406
+ tags$dt("Can I use this data for research/publication?"),
407
+ tags$dd("Yes! Always credit iNaturalist and respect original content licenses. See iNaturalist's Data Use Policy."),
408
+ tags$dt("Why is the map sometimes empty?"),
409
+ tags$dd("Some species/locations are obscured for privacy, or there may be no recent observations in your selected area and time."),
410
+ tags$dt("Are locations accurate?"),
411
+ tags$dd("Coordinate accuracy varies by observer and privacy settings."),
412
+ tags$dt("Can I see private/sensitive records?"),
413
+ tags$dd("No—privacy and ethical protection is strictly respected by the iNaturalist API and this app.")
414
+ ),
415
+ tags$h4("Responsible Use"),
416
+ tags$p("Never disturb wildlife for photos. Be cautious with sensitive data. Community-driven science works best when it's ethical and transparent."),
417
+ tags$h4("Get Involved!"),
418
+ tags$p("Join iNaturalist, share your own records, or help identify others' observations. Every data point helps conservation.")
419
+ ),
420
+ tabPanel("How to Use",
421
+ tags$h3("Quick Start Guide"),
422
+ tags$ol(
423
+ tags$li("Search for a place or draw a rectangle on the map (one region at a time)."),
424
+ tags$li("Set your date range. For best speed, keep queries focused."),
425
+ tags$li("Choose a taxon class or enter a species name."),
426
+ tags$li("Pick 'Live' for the latest data (slower, but up-to-date) or 'Archive' for instant results (fixed snapshot)."),
427
+ tags$li("Click Run Query. Visualizations and tables will update below!"),
428
+ tags$li("Download the full results table as CSV for further analysis.")
429
+ ),
430
+ tags$h4("Tips"),
431
+ tags$ul(
432
+ tags$li("Use Archive mode for large or exploratory queries—it is much faster."),
433
+ tags$li("Live mode fetches week-by-week and may take minutes for big regions or long periods (a progress bar helps you track progress)."),
434
+ tags$li("To reset your selected area, click 'Clear Bounding Box'.")
435
+ ),
436
+ tags$h4("Contact & Support"),
437
+ tags$p("For questions or feedback, visit our GitHub repository or email the authors.")
438
+ )
439
  )
440
  ),
 
441
  mainPanel(
442
  tabsetPanel(
443
+ # ----------- CHANGED: summary beside plot -----------
444
+ tabPanel("Daily Time Series",
445
+ fluidRow(
446
+ column(width = 8, withSpinner(plotOutput("dailyPlot"), type = 6)),
447
+ column(width = 4, verbatimTextOutput("dailySummary"))
448
+ )
449
+ ),
450
+ tabPanel("Top Species", withSpinner(plotOutput("speciesPlot"), type = 6)),
451
+ tabPanel("Hexbin Map (All Data)", withSpinner(plotOutput("hotspotMap"), type = 6)),
452
+ tabPanel("All Data Table", withSpinner(DT::dataTableOutput("dataTable"), type = 6))
453
  )
454
  )
455
  )
456
  )
457
+ # -------------------- SERVER --------------------------
458
  server <- function(input, output, session) {
459
+ rv <- reactiveValues(bbox = NULL)
460
+ output$select_map <- renderLeaflet({
461
+ leaflet() %>% addTiles() %>%
462
+ setView(lng = -95, lat = 40, zoom = 3) %>%
463
+ addDrawToolbar(
464
+ targetGroup = "drawn_bboxes",
465
+ rectangleOptions = drawRectangleOptions(repeatMode = FALSE),
466
+ polylineOptions = FALSE, circleOptions = FALSE,
467
+ markerOptions = FALSE, circleMarkerOptions = FALSE,
468
+ polygonOptions = FALSE, editOptions = editToolbarOptions()
469
+ )
 
 
470
  })
471
+ observeEvent(input$select_map_draw_new_feature, {
472
+ feat <- input$select_map_draw_new_feature
473
+ if (!is.null(feat$geometry) && feat$geometry$type == "Polygon") {
474
+ coords <- feat$geometry$coordinates[[1]]
475
+ lngs <- vapply(coords, function(x) x[[1]], numeric(1))
476
+ lats <- vapply(coords, function(x) x[[2]], numeric(1))
477
+ rv$bbox <- c(min(lats), min(lngs), max(lats), max(lngs))
478
+ }
479
+ })
480
+ observeEvent(input$select_map_draw_deleted_features, { rv$bbox <- NULL })
481
+ observeEvent(input$select_map_draw_edited_features, {
482
+ if (!is.null(input$select_map_draw_all_features)) {
483
+ feats <- input$select_map_draw_all_features$features
484
+ if (length(feats) > 0) {
485
+ feat <- feats[[length(feats)]]
486
+ if (!is.null(feat$geometry) && feat$geometry$type == "Polygon") {
487
+ coords <- feat$geometry$coordinates[[1]]
488
+ lngs <- vapply(coords, function(x) x[[1]], numeric(1))
489
+ lats <- vapply(coords, function(x) x[[2]], numeric(1))
490
+ rv$bbox <- c(min(lats), min(lngs), max(lats), max(lngs))
491
+ }
492
+ }
493
  }
494
  })
 
495
  observeEvent(input$clear_bbox, {
496
+ rv$bbox <- NULL
497
+ leafletProxy("select_map") %>%
498
+ clearGroup("drawn_bboxes") %>%
499
+ clearGroup("search_bbox")
 
 
 
500
  })
501
+ observeEvent(input$region_search_btn, {
502
+ loc <- input$region_search
503
+ if (!is.null(loc) && nzchar(loc)) {
504
+ url <- paste0("https://nominatim.openstreetmap.org/search?format=json&q=", URLencode(loc))
505
+ res <- tryCatch(jsonlite::fromJSON(url), error=function(e) NULL)
506
+ if (!is.null(res) && nrow(res) >= 1 && !is.null(res$boundingbox[1])) {
507
+ bbox_bb <- res$boundingbox[1][[1]]
508
+ if (is.character(bbox_bb) && length(bbox_bb) == 4) {
509
+ bbox_raw <- as.numeric(bbox_bb)
510
+ } else if (is.list(bbox_bb) && length(bbox_bb) == 4) {
511
+ bbox_raw <- as.numeric(unlist(bbox_bb))
512
+ } else if (is.character(res$boundingbox[1])) {
513
+ bbox_raw <- as.numeric(unlist(strsplit(res$boundingbox[1], ",")))
514
+ } else {
515
+ bbox_raw <- NULL
516
+ }
517
+ if (!is.null(bbox_raw) && length(bbox_raw) == 4 && all(!is.na(bbox_raw))) {
518
+ bbox <- c(bbox_raw[1], bbox_raw[3], bbox_raw[2], bbox_raw[4])
519
+ leafletProxy("select_map") %>%
520
+ clearGroup("search_bbox") %>%
521
+ addRectangles(
522
+ lng1 = bbox[2], lat1 = bbox[1], lng2 = bbox[4], lat2 = bbox[3],
523
+ fillColor = "red", fillOpacity = 0.1, color = "red", group = "search_bbox"
524
+ ) %>%
525
+ fitBounds(lng1 = bbox[2], lat1 = bbox[1], lng2 = bbox[4], lat2 = bbox[3])
526
+ rv$bbox <- bbox
527
+ } else {
528
+ showNotification("Unexpected bounding box format from geocoder.", type = "error", duration = 6)
529
+ }
530
+ } else {
531
+ showNotification("Could not geocode this place. Try a different name.", type = "warning", duration = 5)
532
+ }
533
  }
534
  })
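A note on the reordering in the geocoding handler above (descriptive only): Nominatim returns boundingbox as c(south, north, west, east), while the app's rv$bbox convention is c(swlat, swlng, nelat, nelng), i.e. c(south, west, north, east):

# bbox_raw: c(south, north, west, east)  ->  rv$bbox: c(swlat, swlng, nelat, nelng)
bbox <- c(bbox_raw[1], bbox_raw[3], bbox_raw[2], bbox_raw[4])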
535
+ output$bbox_coords <- renderText({
536
+ if (is.null(rv$bbox)) "No bounding box defined yet. Search for a place or draw a rectangle." else paste0(
537
+ "Bounding box:\nSW: (", round(rv$bbox[1], 4), ", ", round(rv$bbox[2], 4), ")\nNE: (", round(rv$bbox[3], 4), ", ", round(rv$bbox[4], 4), ")")
538
+ })
539
  result_data <- reactiveVal(NULL)
 
 
540
  observeEvent(input$run_query, {
541
+ req(input$date_range)
542
+ req(rv$bbox)
543
+ start_date <- as.Date(input$date_range[1])
544
+ end_date <- as.Date(input$date_range[2])
545
+ swlat <- rv$bbox[1]; swlng <- rv$bbox[2]; nelat <- rv$bbox[3]; nelng <- rv$bbox[4]
546
+ if (input$data_source == "archived") {
547
+ # req(file.exists(parquet_path))
548
+ inat_all_raw <- arrow::read_parquet(parquet_path)# %>% as_tibble()
549
+ inat_all <- inat_all_raw %>%
550
+ filter(!is.na(latitude) & !is.na(longitude)) %>%
551
+ filter(latitude >= swlat, latitude <= nelat,
552
+ longitude >= swlng, longitude <= nelng) |> collect()
553
+ if ("observed_on" %in% names(inat_all)) {
554
+ inat_all <- inat_all %>%
555
+ filter(!is.na(observed_on)) %>%
556
+ filter(as.Date(observed_on) >= start_date, as.Date(observed_on) <= end_date)
557
+ }
558
+ if (input$query_type == "iconic" && !is.null(input$iconic_taxon) && input$iconic_taxon != "" &&
559
+ "iconic_taxon_name" %in% names(inat_all)) {
560
+ inat_all <- inat_all %>% filter(iconic_taxon_name == input$iconic_taxon)
561
+ }
562
+ if (input$query_type == "species" && !is.null(input$species_name) && input$species_name != "" &&
563
+ "scientific_name" %in% names(inat_all)) {
564
+ inat_all <- inat_all %>% filter(scientific_name == input$species_name)
565
+ }
566
+ hm <- get_high_mortality_days(inat_all)
567
+ merged_high <- if (!is.null(hm$days)) inat_all %>% filter(as.Date(observed_on) %in% hm$days) else inat_all
568
+ query_res <- list(
569
+ merged_df_all = inat_all,
570
+ merged_df = merged_high,
571
+ daily_plot = make_daily_plot(inat_all, start_date, end_date),
572
+ top_species_plot = make_top_species_plot(inat_all),
573
+ map_hotspots_gg = make_hexbin_map(inat_all, start_date, end_date),
574
+ daily_90th_quant = if (!is.null(hm$quant)) hm$quant else NA
575
+ )
576
+ result_data(query_res)
577
  } else {
578
+ iconic_val <- if (input$query_type == "iconic") input$iconic_taxon else NULL
579
+ species_val <- if (input$query_type == "species") input$species_name else NULL
580
+ week_starts <- seq.Date(start_date, end_date, by = "1 week")
581
+ showNotification(
582
+ paste("Live Mode: About to fetch", length(week_starts),
583
+ "weeks from iNaturalist API. This may take several minutes for large queries."),
584
+ duration = 7, type = "warning"
585
+ )
586
+ progress <- shiny::Progress$new()
587
+ on.exit(progress$close())
588
+ progress$set(message = paste("Live Query: Fetching", length(week_starts), "weeks"), value = 0)
589
+ query_res <- getDeadVertebrates_dateRange(
590
+ start_date = start_date,
591
+ end_date = end_date,
592
+ swlat = swlat,
593
+ swlng = swlng,
594
+ nelat = nelat,
595
+ nelng = nelng,
596
+ iconic_taxa = iconic_val,
597
+ taxon_name = species_val,
598
+ .shiny_progress = progress
599
  )
 
600
  result_data(query_res)
601
+ }
602
  })
603
+ output$dailyPlot <- renderPlot({ req(result_data()); result_data()$daily_plot })
604
+
605
+ output$dailySummary <- renderText({
606
  req(result_data())
607
+ df <- result_data()$merged_df_all
608
+ if (nrow(df) == 0 || !"observed_on" %in% names(df)) return("No data available.")
609
+ df <- df %>% mutate(obs_date = as.Date(observed_on)) %>% filter(!is.na(obs_date))
610
+ n_obs <- nrow(df)
611
+ n_days <- n_distinct(df$obs_date)
612
+ span_days <- if (n_days > 1) paste0(range(df$obs_date, na.rm=TRUE), collapse=" to ") else as.character(unique(df$obs_date))
613
+ counts_by_day <- df %>% count(obs_date)
614
+ peak <- counts_by_day %>% filter(n == max(n)) %>% pull(obs_date)
615
+ peak_val <- max(counts_by_day$n)
616
+ avg_day <- round(mean(counts_by_day$n), 2)
617
+ paste0(
618
+ "Summary:\n",
619
+ "- Total mortality records: ", n_obs, "\n",
620
+ "- Date range: \n", span_days, "\n",
621
+ "- Days with data: ", n_days, "\n",
622
+ "- Average per day: ", avg_day, "\n",
623
+ "- Peak day: ", paste(peak, collapse = ", "), " (", peak_val, " records)\n",
624
+ if (peak_val > avg_day*2) "- Spike in mortality observations" else ""
625
+ )
626
+
627
  })
628
+
629
+ output$speciesPlot <- renderPlot({ req(result_data()); result_data()$top_species_plot })
630
+ output$hotspotMap <- renderPlot({ req(result_data()); result_data()$map_hotspots_gg })
631
  output$dataTable <- DT::renderDataTable({
632
  req(result_data())
633
+ df <- result_data()$merged_df_all
 
634
  if (nrow(df) == 0) {
635
+ return(DT::datatable(data.frame(Message = "No records found"), options = list(pageLength = 20)))
 
 
 
636
  }
637
+ # --- Fix column naming for scientific_name ---
638
+ if (!"scientific_name" %in% names(df) && "taxon.name" %in% names(df)) {
639
+ df$scientific_name <- df$taxon.name
640
+ }
641
+ # --- Add inat_link ---
642
+ if ("id" %in% names(df)) {
643
+ df$inat_link <- paste0("<a href='https://www.inaturalist.org/observations/", df$id, "' target='_blank'>", df$id, "</a>")
644
  } else {
645
+ df$inat_link <- NA
646
  }
647
+ # --- Fix image column (robust to both sources) ---
648
+ df$image_thumb <- "No Img"
649
+ if ("image_url" %in% names(df)) {
650
+ df$image_thumb <- ifelse(!is.na(df$image_url) & df$image_url != "", paste0("<img src='", df$image_url, "' width='50'/>"), "No Img")
651
+ } else if ("taxon.default_photo.square_url" %in% names(df)) {
652
+ df$image_thumb <- ifelse(!is.na(df$taxon.default_photo.square_url) & df$taxon.default_photo.square_url != "", paste0("<img src='", df$taxon.default_photo.square_url, "' width='50'/>"), "No Img")
653
+ } else if ("taxon" %in% names(df)) {
654
+ taxon_photo <- sapply(df$taxon, function(x) {
655
+ if (is.list(x) && "default_photo" %in% names(x) && !is.null(x$default_photo$square_url)) x$default_photo$square_url else NA
656
+ })
657
+ df$image_thumb <- ifelse(!is.na(taxon_photo) & taxon_photo != "", paste0("<img src='", taxon_photo, "' width='50'/>"), "No Img")
658
+ }
659
+ # --- Show columns ---
660
  show_cols <- c(
661
+ "inat_link", "image_thumb",
662
+ "scientific_name",
663
+ intersect(c("observed_on", "created_at_details.date"), names(df)),
664
+ "latitude", "longitude",
665
+ setdiff(names(df), c("inat_link", "image_thumb", "scientific_name", "observed_on", "created_at_details.date", "latitude", "longitude"))
 
 
 
666
  )
667
+ DT::datatable(df[, show_cols[show_cols %in% names(df)], drop = FALSE], escape = FALSE,
668
+ options = list(pageLength = 20, autoWidth = TRUE))
669
  })
670
  output$downloadAll <- downloadHandler(
671
+ filename = function() paste0("inat_dead_ALL_", Sys.Date(), ".csv"),
672
+ content = function(file) { req(result_data()); readr::write_csv(result_data()$merged_df_all, file) }
673
  )
674
  }
675
 
example.html ADDED
The diff for this file is too large to render. See raw diff
 
poc/app_no90_2025.R ADDED
@@ -0,0 +1,801 @@
1
+ # Remove the 90% (hotspot) mapping and replace it with a hexbin map
2
+ # Remove the download option for the 90th-percentile subset of the data
3
+ # Polish the About and How to section
4
+ # Rename the 'Data Table (Top 90%)' tab and show all of the data there as well
5
+
6
+ # Optional: Either incorporate past mortality data or just upload in zenodo
7
+ # Optional: Date slider
8
+
9
+ ##################################################################
10
+ # Single R Script: Mortality Analysis + Shiny - Hexbin Map (All Data)
11
+ # Using Weekly Queries (No Hard-Coded Years)
12
+ ##################################################################
13
+
14
+ # See https://www.inaturalist.org/pages/api+recommended+practices
15
+ # Query Rate
16
+ # Please keep requests to about 1 per second, and around 10k API requests a day
17
+ # The API is meant to be used for building applications and for fetching small to medium batches of data. It is not meant to be a way to download data in bulk
18
+ # Requests exceeding this limit might be throttled, and will return an HTTP 429 exception “Too Many Requests”
19
+ # Please add delays into your code to keep under these limits, and especially if you’re getting 429 errors
20
+ # We may block IPs that consistently exceed these limits
21
+ # Please use a single IP address for fetching data. If we think multiple IPs are being used in coordination to bypass rate limits, we may block those IPs regardless of query rate
22
+ # Downloading over 5 GB of media per hour or 24 GB of media per day may result in a permanent block
23
+ # If writing software to interact with the API, please consider using a custom User Agent to identify your application, or iNaturalist username, or something we might use to differentiate your requests
24
+ # The User Agent can be set with an HTTP header, e.g. User-Agent: [[application or user name]
25
+
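One way to follow the User-Agent recommendation above is to attach an httr::user_agent() object to every GET call; a minimal sketch with a placeholder application name and username (this is not something the script below currently does):

    ua   <- httr::user_agent("dead-wildlife-shiny (iNat user: your_username)")
    resp <- httr::GET(
      "https://api.inaturalist.org/v1/observations?term_id=17&term_value_id=19&per_page=1",
      ua
    )
    httr::status_code(resp)   # 200 if the request succeeded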
26
+ ### 1) Install/Load Required Packages ####
27
+ required_packages <- c(
28
+ "httr", "jsonlite", "tidyverse", "glue", "lubridate",
29
+ "wesanderson", "viridis", "hexbin", "shinycssloaders",
30
+ "DT", "maps", "mapdata", "leaflet", "leaflet.extras",
31
+ "shinythemes", "shiny"
32
+ )
33
+
34
+ installed_packages <- rownames(installed.packages())
35
+ for (pkg in required_packages) {
36
+ if (!pkg %in% installed_packages) {
37
+ install.packages(pkg, dependencies = TRUE)
38
+ }
39
+ }
40
+
41
+ library(httr)
42
+ library(jsonlite)
43
+ library(tidyverse)
44
+ library(glue)
45
+ library(lubridate)
46
+ library(wesanderson)
47
+ library(viridis)
48
+ library(hexbin)
49
+ library(shinycssloaders)
50
+ library(DT)
51
+ library(maps)
52
+ library(mapdata)
53
+ library(leaflet)
54
+ library(leaflet.extras)
55
+ library(shinythemes)
56
+ library(shiny)
57
+
58
+ ##################################################################
59
+ # 2) Mortality-Analysis Functions
60
+ ##################################################################
61
+
62
+ # -- Base function to fetch dead observations over a specified date range --
63
+ fetch_dead_data_once <- function(
64
+ place_id = NULL,
65
+ swlat = NULL,
66
+ swlng = NULL,
67
+ nelat = NULL,
68
+ nelng = NULL,
69
+ start_date,
70
+ end_date,
71
+ iconic_taxa = NULL,
72
+ taxon_name = NULL,
73
+ conservation_status = NULL,
74
+ per_page = 200,
75
+ max_pages = 200
76
+ ) {
77
+ base_url <- "https://api.inaturalist.org/v1/observations"
78
+
79
+ q_parts <- list(
80
+ "term_id=17", # 'Dead' annotation
81
+ "term_value_id=19", # 'Dead' annotation
82
+ "verifiable=true", # only verifiable
83
+ glue("d1={start_date}"), # start date
84
+ glue("d2={end_date}"), # end date
85
+ "order=desc",
86
+ "order_by=created_at",
87
+ glue("per_page={per_page}")
88
+ )
89
+
90
+ if (!is.null(iconic_taxa) && iconic_taxa != "") {
91
+ q_parts <- c(q_parts, glue("iconic_taxa={iconic_taxa}"))
92
+ }
93
+
94
+ if (!is.null(taxon_name) && taxon_name != "") {
95
+ q_parts <- c(q_parts, glue("taxon_name={URLencode(taxon_name)}"))
96
+ }
97
+
98
+ if (!is.null(conservation_status) && conservation_status != "") {
99
+ # If you'd like to filter by iNat conservation status,
100
+ # or e.g. pass additional parameters to the API
101
+ if (!grepl("=", conservation_status, fixed = TRUE)) {
102
+ q_parts <- c(q_parts, glue("cs={URLencode(conservation_status)}"))
103
+ } else {
104
+ q_parts <- c(q_parts, conservation_status)
105
+ }
106
+ }
107
+
108
+ query_params <- paste(q_parts, collapse = "&")
109
+
110
+ # Build location portion of query
111
+ loc_part <- ""
112
+ if (!is.null(place_id)) {
113
+ loc_part <- glue("&place_id={place_id}")
114
+ } else if (!is.null(swlat) && !is.null(swlng) &&
115
+ !is.null(nelat) && !is.null(nelng)) {
116
+ loc_part <- glue("&nelat={nelat}&nelng={nelng}&swlat={swlat}&swlng={swlng}")
117
+ } else {
118
+ stop("Must provide either 'place_id' OR bounding box (swlat, swlng, nelat, nelng).")
119
+ }
120
+
121
+ observations_list <- list()
122
+ current_page <- 1
123
+
124
+ while (current_page <= max_pages) {
125
+ query_url <- paste0(
126
+ base_url, "?", query_params, "&page=", current_page, loc_part
127
+ )
128
+
129
+ message("Fetching page ", current_page,
130
+ " [", start_date, " to ", end_date, "]:\n", query_url)
131
+
132
+ resp <- GET(query_url)
133
+ if (http_error(resp)) {
134
+ warning("HTTP error on page ", current_page, ": ", status_code(resp))
135
+ break
136
+ }
137
+
138
+ parsed <- content(resp, as = "text", encoding = "UTF-8") %>%
139
+ fromJSON(flatten = TRUE)
140
+
141
+ if (length(parsed$results) == 0) {
142
+ message("No more results at page ", current_page)
143
+ break
144
+ }
145
+
146
+ obs_page_df <- as_tibble(parsed$results)
147
+ observations_list[[current_page]] <- obs_page_df
148
+
149
+ # If the returned page is smaller than per_page, we've reached the last page
150
+ if (nrow(obs_page_df) < per_page) {
151
+ message("Reached last page of results at page ", current_page)
152
+ break
153
+ }
154
+
155
+ current_page <- current_page + 1
156
+ Sys.sleep(1.4) # Polite pause, reduced from 1.5 initially
157
+ }
158
+
159
+ observations_all <- bind_rows(observations_list)
160
+ return(observations_all)
161
+ }
162
+
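A minimal usage sketch for fetch_dead_data_once(); the bounding box, dates, and page limit below are arbitrary illustrations rather than values the app itself uses:

    # One week of 'Dead' bird observations inside a rough San Francisco Bay Area box.
    bay_week <- fetch_dead_data_once(
      swlat = 37.2, swlng = -122.6,
      nelat = 38.0, nelng = -121.7,
      start_date  = "2023-06-01",
      end_date    = "2023-06-07",
      iconic_taxa = "Aves",
      per_page    = 200,
      max_pages   = 5
    )
    nrow(bay_week)   # number of observations returned for that week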
163
+ # -- Function to fetch data by iterating through each WEEK of a given year --
164
+ fetch_dead_data_weekly <- function(
165
+ year,
166
+ place_id = NULL,
167
+ swlat = NULL,
168
+ swlng = NULL,
169
+ nelat = NULL,
170
+ nelng = NULL,
171
+ iconic_taxa = NULL,
172
+ taxon_name = NULL,
173
+ conservation_status = NULL,
174
+ per_page = 200,
175
+ max_pages = 200
176
+ ) {
177
+ start_of_year <- as.Date(glue("{year}-01-01"))
178
+ end_of_year <- as.Date(glue("{year}-12-31"))
179
+
180
+ # Create a sequence of "week starts" from Jan 1 to Dec 31
181
+ week_starts <- seq.Date(start_of_year, end_of_year, by = "1 week")
182
+
183
+ weekly_list <- list()
184
+
185
+ for (i in seq_along(week_starts)) {
186
+ start_date <- week_starts[i]
187
+ # If not the last index, end_date = next start - 1 day, else clamp to year-end
188
+ if (i < length(week_starts)) {
189
+ end_date <- week_starts[i + 1] - 1
190
+ } else {
191
+ end_date <- end_of_year
192
+ }
193
+
194
+ message("\n--- Querying ", year, ", Week #", i,
195
+ " [", start_date, " to ", end_date, "] ---")
196
+
197
+ df_week <- fetch_dead_data_once(
198
+ place_id = place_id,
199
+ swlat = swlat,
200
+ swlng = swlng,
201
+ nelat = nelat,
202
+ nelng = nelng,
203
+ start_date = start_date,
204
+ end_date = end_date,
205
+ iconic_taxa = iconic_taxa,
206
+ taxon_name = taxon_name,
207
+ conservation_status = conservation_status,
208
+ per_page = per_page,
209
+ max_pages = max_pages
210
+ )
211
+ weekly_list[[i]] <- df_week
212
+
213
+ Sys.sleep(1.5)
214
+ }
215
+
216
+ year_df <- bind_rows(weekly_list)
217
+ return(year_df)
218
+ }
219
+
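For reference, the weekly windows built above are plain 7-day blocks anchored on January 1, with the last block clamped to December 31; a quick illustration of that arithmetic:

    week_starts <- seq.Date(as.Date("2023-01-01"), as.Date("2023-12-31"), by = "1 week")
    length(week_starts)    # 53 windows for 2023
    week_starts[2] - 1     # "2023-01-07": end of the first window
    week_starts[53]        # "2023-12-31": the final window covers just the last day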
220
+ # -- Wrapper that iterates over multiple years, pulling data weekly for each year --
221
+ getDeadVertebrates_weeklyLoop <- function(
222
+ years, # <--- No default: pass your own vector of years
223
+ place_id = NULL,
224
+ swlat = NULL,
225
+ swlng = NULL,
226
+ nelat = NULL,
227
+ nelng = NULL,
228
+ iconic_taxa = NULL,
229
+ taxon_name = NULL,
230
+ conservation_status = NULL,
231
+ per_page = 500,
232
+ max_pages = 500,
233
+ outdir = NULL
234
+ ) {
235
+ all_years_list <- list()
236
+
237
+ # For each year, run weekly fetch
238
+ for (yr in years) {
239
+ message("\n========= YEAR: ", yr, " ==========\n")
240
+ yr_df <- fetch_dead_data_weekly(
241
+ year = yr,
242
+ place_id = place_id,
243
+ swlat = swlat,
244
+ swlng = swlng,
245
+ nelat = nelat,
246
+ nelng = nelng,
247
+ iconic_taxa= iconic_taxa,
248
+ taxon_name = taxon_name,
249
+ conservation_status = conservation_status,
250
+ per_page = per_page,
251
+ max_pages = max_pages
252
+ ) %>%
253
+ mutate(Window = as.character(yr))
254
+
255
+ all_years_list[[as.character(yr)]] <- yr_df
256
+ }
257
+
258
+ merged_df_all <- bind_rows(all_years_list)
259
+
260
+ # If no data found or missing crucial columns, create empty placeholders
261
+ if (!"created_at_details.date" %in% names(merged_df_all) ||
262
+ nrow(merged_df_all) == 0) {
263
+ daily_plot <- ggplot() +
264
+ labs(title = "No 'Dead' Observations Found", x = NULL, y = NULL) +
265
+ theme_void()
266
+
267
+ wong_palette <- c(
268
+ "#000000","#CC6666","#D55E00","#E69F00",
269
+ "#0072B2", "#56B4E9",
270
+ "#009E73","#66CC99","#999933", "#F0E442",
271
+ "#CC79A7"
272
+ )
273
+
274
+ top_species_plot <- ggplot() +
275
+ labs(title = "No species data", x = NULL, y = NULL) +
276
+ theme_void()
277
+
278
+ map_hotspots_gg <- ggplot() +
279
+ labs(title = "No data for map") +
280
+ theme_void()
281
+
282
+ return(list(
283
+ merged_df_all = merged_df_all,
284
+ merged_df = merged_df_all,
285
+ daily_plot = daily_plot,
286
+ top_species_plot = top_species_plot,
287
+ map_hotspots_gg = map_hotspots_gg,
288
+ daily_90th_quant = NA
289
+ ))
290
+ }
291
+
292
+ # Optionally write out to disk
293
+ if (!is.null(outdir)) {
294
+ if (!dir.exists(outdir)) {
295
+ dir.create(outdir, recursive = TRUE)
296
+ }
297
+ readr::write_csv(merged_df_all, file.path(outdir, "merged_df_ALL_data.csv"))
298
+ }
299
+
300
+ # Aggregate counts by day
301
+ counts_by_day <- merged_df_all %>%
302
+ mutate(obs_date = as.Date(`observed_on`)) %>%
303
+ group_by(Window, obs_date) %>%
304
+ summarise(n = n_distinct(id), .groups = "drop")
305
+
306
+ y_max_value <- max(counts_by_day$n, na.rm = TRUE)
307
+ n_windows <- length(unique(counts_by_day$Window))
308
+ wes_colors <- wes_palette("Zissou1", n_windows, type = "discrete")
309
+
310
+ # Daily line plot
311
+
312
+ wong_palette <- c(
313
+ "#000000","#CC6666","#D55E00","#E69F00",
314
+ "#0072B2", "#56B4E9",
315
+ "#009E73","#66CC99","#999933", "#F0E442",
316
+ "#CC79A7"
317
+ )
318
+
319
+
320
+
321
+ daily_plot <- ggplot(counts_by_day, aes(x = obs_date, y = n, color = Window)) +
322
+ geom_line(size = 1.2) +
323
+ geom_point(size = 2) +
324
+ scale_x_date(date_labels = "%b", date_breaks = "1 month") +
325
+ scale_y_continuous(limits = c(0, y_max_value)) +
326
+ scale_color_manual(values = wong_palette) +
327
+ labs(
328
+ title = glue("Daily 'Dead' Observations (Years {paste(years, collapse=', ')})"),
329
+ x = "Month",
330
+ y = "Number of Observations",
331
+ color = "Year"
332
+ ) +
333
+ theme_minimal(base_size = 14) +
334
+ theme(axis.text.x = element_text(angle = 45, hjust = 1))
335
+
336
+ # Top species bar plot
337
+ if ("taxon.name" %in% names(merged_df_all)) {
338
+ species_counts <- merged_df_all %>%
339
+ filter(!is.na(taxon.name)) %>%
340
+ group_by(Window, taxon.name) %>%
341
+ summarise(dead_count = n(), .groups = "drop")
342
+
343
+ top_species_overall <- species_counts %>%
344
+ group_by(taxon.name) %>%
345
+ summarise(total_dead = sum(dead_count)) %>%
346
+ arrange(desc(total_dead)) %>%
347
+ slice_head(n = 20)
348
+
349
+ species_top20 <- species_counts %>%
350
+ filter(taxon.name %in% top_species_overall$taxon.name)
351
+
352
+ wong_palette <- c(
353
+ "#000000","#CC6666","#D55E00","#E69F00",
354
+ "#0072B2", "#56B4E9",
355
+ "#009E73","#66CC99","#999933", "#F0E442",
356
+ "#CC79A7"
357
+ )
358
+
359
+ top_species_plot <- ggplot(species_top20, aes(
360
+ x = reorder(taxon.name, -dead_count),
361
+ y = dead_count,
362
+ fill= Window
363
+ )) +
364
+ geom_col(position = position_dodge(width = 0.7)) +
365
+ coord_flip() +
366
+ labs(
367
+ title = "Top 20 Species with 'Dead' Observations",
368
+ x = "Species",
369
+ y = "Number of Dead Observations",
370
+ fill = "Year"
371
+ ) +
372
+ theme_minimal(base_size = 14)+
373
+ scale_fill_manual(values = wong_palette)
374
+ } else {
375
+ top_species_plot <- ggplot() +
376
+ labs(title = "No 'taxon.name' column found", x = NULL, y = NULL) +
377
+ theme_void()
378
+ }
379
+
380
+ # Identify "high mortality" days (>= 90th percentile)
381
+ daily_quantile <- quantile(counts_by_day$n, probs = 0.90, na.rm = TRUE)
382
+ high_mortality_days <- counts_by_day %>%
383
+ filter(n >= daily_quantile) %>%
384
+ pull(obs_date)
385
+
386
+ merged_high <- merged_df_all %>%
387
+ mutate(obs_date = as.Date(`observed_on`)) %>%
388
+ filter(obs_date %in% high_mortality_days)
389
+
390
+ # --------------------------------------------------------------
391
+ # Hexbin Map of ALL data (replaces previous hotspot map section)
392
+ # --------------------------------------------------------------
393
+ if ("location" %in% names(merged_df_all)) {
394
+ location_df_all <- merged_df_all %>%
395
+ filter(!is.na(location) & location != "") %>%
396
+ separate(location, into = c("lat_str", "lon_str"), sep = ",", remove = FALSE) %>%
397
+ mutate(
398
+ latitude = as.numeric(lat_str),
399
+ longitude = as.numeric(lon_str)
400
+ )
401
+
402
+ if (nrow(location_df_all) == 0) {
403
+ map_hotspots_gg <- ggplot() +
404
+ labs(title = "No spatial data available for map") +
405
+ theme_void()
406
+ } else {
407
+ min_lon <- min(location_df_all$longitude, na.rm = TRUE)
408
+ max_lon <- max(location_df_all$longitude, na.rm = TRUE)
409
+ min_lat <- min(location_df_all$latitude, na.rm = TRUE)
410
+ max_lat <- max(location_df_all$latitude, na.rm = TRUE)
411
+
412
+ map_hotspots_gg <- ggplot() +
413
+ borders("world", fill = "gray80", colour = "white") +
414
+ stat_bin_hex(
415
+ data = location_df_all,
416
+ aes(x = longitude, y = latitude),
417
+ bins = 500,
418
+ color = "black",
419
+ alpha = 0.8
420
+ ) +
421
+ scale_fill_viridis_c(option = "plasma", name = "Observation Count") +
422
+ coord_quickmap(
423
+ xlim = c(min_lon, max_lon),
424
+ ylim = c(min_lat, max_lat),
425
+ expand = TRUE
426
+ ) +
427
+ labs(
428
+ title = glue("'Dead' Wildlife Observations Hexbin Map (Years {paste(years, collapse=', ')})"),
429
+ x = "Longitude",
430
+ y = "Latitude"
431
+ ) +
432
+ theme_classic(base_size = 14) +
433
+ theme(
434
+ axis.text.x = element_text(face = "bold", size = 16, color = 'black'),
435
+ axis.title.x = element_text(face = "bold", size = 16, color = 'black'),
436
+ axis.text.y = element_text(face = "bold", size = 16, color = 'black'),
437
+ axis.title.y = element_text(face = "bold", size = 16, color = 'black')
438
+ )
439
+ }
440
+ } else {
441
+ map_hotspots_gg <- ggplot() +
442
+ labs(title = "No 'location' column for map") +
443
+ theme_void()
444
+ }
445
+ # --------------------------------------------------------------
446
+
447
+ # Optionally save outputs
448
+ if (!is.null(outdir)) {
449
+ readr::write_csv(merged_high, file.path(outdir, "merged_df_top90.csv"))
450
+ ggsave(file.path(outdir, "daily_plot.png"),
451
+ daily_plot, width = 8, height = 5, dpi = 300)
452
+ ggsave(file.path(outdir, "top_species_plot.png"),
453
+ top_species_plot, width = 7, height = 7, dpi = 300)
454
+ ggsave(file.path(outdir, "hexbin_map.png"),
455
+ map_hotspots_gg, width = 8, height = 5, dpi = 300)
456
+ }
457
+
458
+ return(list(
459
+ merged_df_all = merged_df_all,
460
+ merged_df = merged_high,
461
+ daily_plot = daily_plot,
462
+ top_species_plot = top_species_plot,
463
+ map_hotspots_gg = map_hotspots_gg,
464
+ daily_90th_quant = daily_quantile
465
+ ))
466
+ }
467
+
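A hedged usage sketch for getDeadVertebrates_weeklyLoop(); place_id 14 is California per the UI help text further down, and the output folder is an assumed local path:

    res <- getDeadVertebrates_weeklyLoop(
      years       = c(2022, 2023),
      place_id    = 14,                     # California (see the UI help text)
      iconic_taxa = "Mammalia",
      outdir      = "output/ca_mammals"     # CSVs and PNGs are written here when supplied
    )
    res$daily_90th_quant    # 90th percentile of daily mortality counts
    print(res$daily_plot)   # daily time-series ggplot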
468
+ ##################################################################
469
+ # 3) Shiny App: UI + Server (Weekly Queries)
470
+ ##################################################################
471
+
472
+ ui <- fluidPage(
473
+ theme = shinytheme("cosmo"), # Use a professional theme from shinythemes
474
+
475
+ # -- Logo and Title at the top --
476
+ fluidRow(
477
+ column(
478
+ width = 2,
479
+ tags$img(src = "www/all_logos.png", height = "400px")
480
+ ),
481
+ column(
482
+ width = 10,
483
+ titlePanel("Dead Wildlife Observations from iNaturalist")
484
+ )
485
+ ),
486
+ hr(),
487
+
488
+ sidebarLayout(
489
+ sidebarPanel(
490
+ tabsetPanel(
491
+ id = "sidebar_tabs",
492
+
493
+ # == Query Panel ==
494
+ tabPanel(
495
+ title = "Query",
496
+ br(),
497
+ radioButtons("region_mode", "Region Input Mode:",
498
+ choices = c("Enter Numeric place_id" = "place",
499
+ "Two-Click Bounding Box" = "bbox"),
500
+ selected = "bbox"),
501
+
502
+ # If user chooses numeric "place_id"
503
+ conditionalPanel(
504
+ condition = "input.region_mode == 'place'",
505
+ numericInput("place_id",
506
+ "Numeric place_id (e.g. 1 = USA, 6712 = Canada, 14 = California)",
507
+ value = 1, min = 1, max = 999999, step = 1)
508
+ ),
509
+
510
+ # If user chooses bounding box
511
+ conditionalPanel(
512
+ condition = "input.region_mode == 'bbox'",
513
+ helpText("Left-click once for the SW corner, once more for the NE corner."),
514
+ leafletOutput("map_two_click", height = "300px"),
515
+ br(),
516
+ actionButton("clear_bbox", "Clear bounding box"),
517
+ br(), br(),
518
+ verbatimTextOutput("bbox_coords")
519
+ ),
520
+
521
+ # Years
522
+ checkboxGroupInput("years", "Select Year(s):",
523
+ choices = 2018:2025,
524
+ selected = c(2022, 2023)),
525
+
526
+ # Query by iconic class or exact species
527
+ radioButtons("query_type", "Query By:",
528
+ choices = c("Taxon Class" = "iconic",
529
+ "Exact Species Name" = "species")),
530
+ conditionalPanel(
531
+ condition = "input.query_type == 'iconic'",
532
+ selectInput("iconic_taxon", "Select Taxon Class:",
533
+ choices = c("Aves", "Mammalia", "Reptilia", "Amphibia",
534
+ "Actinopterygii", "Mollusca", "Animalia"),
535
+ selected = "Aves")
536
+ ),
537
+ conditionalPanel(
538
+ condition = "input.query_type == 'species'",
539
+ textInput("species_name", "Enter exact species name (e.g. Puma concolor)", "")
540
+ ),
541
+
542
+ actionButton("run_query", "Run Query", icon = icon("play")),
543
+ hr(),
544
+ # Removed download of hotspot/top-90% CSV
545
+ downloadButton("downloadAll", "Download ALL Data CSV", icon = icon("download"))
546
+ ),
547
+
548
+ # == About Panel ==
549
+ tabPanel(
550
+ title = "About",
551
+ br(),
552
+ p("This Shiny application was created by Diego Ellis Soto (University of California Berkeley) in collaboration with Liam U. Taylor (Bowdoin College), Lizzy Edson (California Academy of Sciences), Christopher J. Schell (University of California Berkeley), Carl Boettiger (University of California Berkeley), and Rebecca Johnson (California Academy of Sciences).\n It queries iNaturalist in real time for observations annotated as 'Dead' wildlife (term_id=17, term_value_id=19).\n These annotations are updated daily through the iNaturalist API, and the data are fetched and summarized here for scientific and conservation purposes."),
553
+ p("Digital participatory-science platforms like iNaturalist allow everyday people to collect and share data about local biodiversity.\n Recording observations of dead wildlife helps track mortality events, disease spread, and other factors affecting animal populations.\n In fact, information on wildlife mortality is often more critical for conservation than records of living animals."),
554
+ p("We encourage everyone to contribute sightings responsibly, so that data on roadkill and other mortalities can support management and conservation efforts and raise public awareness.")
555
+ ),
556
+ # == Participatory Science Panel ==
557
+ # tabPanel(
558
+ # title = "Participatory Science",
559
+ # br(),
560
+ # p("Digital platforms of participatory science like iNaturalist allow everyday people to collect and share data about local biodiversity.\n Recording observations of dead wildlife can help us track mortality events, disease spread, and other factors affecting animal populations.\n In fact information on wildlife mortality are often more critical for conservation efforts than living ones."),
561
+ # p("We encourage everyone to contribute their sightings responsibly, ensuring that any data on roadkill or other mortalities can help management and conservation efforts, and\n raise public awareness.")
562
+ # ),
563
+ #
564
+ # == How To Use Panel ==
565
+ tabPanel(
566
+ title = "How to Use",
567
+ br(),
568
+ p("This application lets you retrieve data about dead wildlife observations from iNaturalist.\n You can choose to manually provide a numeric place_id or define a custom bounding box by clicking twice on the map."),
569
+ p("You can also decide whether to query by taxon class (e.g. Aves) or by exact species name (e.g. Puma concolor)."),
570
+ p("After selecting your inputs, press 'Run Query'. When the query finishes, a CSV download of all retrieved data is available.")
571
+ )
572
+ )
573
+ ),
574
+
575
+ mainPanel(
576
+ tabsetPanel(
577
+ tabPanel("Daily Time Series", withSpinner(plotOutput("dailyPlot"), type = 6)),
578
+ tabPanel("Top Species", withSpinner(plotOutput("speciesPlot"), type = 6)),
579
+ tabPanel("Hexbin Map (All Data)", withSpinner(plotOutput("hotspotMap"), type = 6)),
580
+ tabPanel("Data Table", withSpinner(DT::dataTableOutput("dataTable"), type = 6))
581
+ )
582
+ )
583
+ )
584
+ )
585
+
586
+ server <- function(input, output, session) {
587
+
588
+ # Reactive values for bounding box corners
589
+ rv <- reactiveValues(
590
+ corner1 = NULL,
591
+ corner2 = NULL,
592
+ bbox = NULL
593
+ )
594
+
595
+ # Initialize map
596
+ output$map_two_click <- renderLeaflet({
597
+ leaflet() %>%
598
+ addTiles() %>%
599
+ setView(lng = -100, lat = 40, zoom = 4)
600
+ })
601
+
602
+ # Handle bounding box clicks
603
+ observeEvent(input$map_two_click_click, {
604
+ req(input$region_mode == "bbox")
605
+
606
+ click <- input$map_two_click_click
607
+ if (is.null(click)) return()
608
+
609
+ lat_clicked <- click$lat
610
+ lng_clicked <- click$lng
611
+
612
+ if (is.null(rv$corner1)) {
613
+ rv$corner1 <- c(lat_clicked, lng_clicked)
614
+ showNotification("First corner set. Now click for the opposite corner.")
615
+
616
+ leafletProxy("map_two_click") %>%
617
+ clearMarkers() %>%
618
+ addMarkers(lng = lng_clicked, lat = lat_clicked, popup = "Corner 1")
619
+
620
+ rv$corner2 <- NULL
621
+ rv$bbox <- NULL
622
+
623
+ } else {
624
+ rv$corner2 <- c(lat_clicked, lng_clicked)
625
+
626
+ lat_min <- min(rv$corner1[1], rv$corner2[1])
627
+ lat_max <- max(rv$corner1[1], rv$corner2[1])
628
+ lng_min <- min(rv$corner1[2], rv$corner2[2])
629
+ lng_max <- max(rv$corner1[2], rv$corner2[2])
630
+
631
+ rv$bbox <- c(lat_min, lng_min, lat_max, lng_max)
632
+
633
+ showNotification("Second corner set. Bounding box defined!", duration = 2)
634
+
635
+ leafletProxy("map_two_click") %>%
636
+ clearMarkers() %>%
637
+ addMarkers(lng = rv$corner1[2], lat = rv$corner1[1], popup = "Corner 1") %>%
638
+ addMarkers(lng = rv$corner2[2], lat = rv$corner2[1], popup = "Corner 2") %>%
639
+ clearShapes() %>%
640
+ addRectangles(
641
+ lng1 = lng_min, lat1 = lat_min,
642
+ lng2 = lng_max, lat2 = lat_max,
643
+ fillColor = "red", fillOpacity = 0.2,
644
+ color = "red"
645
+ )
646
+ }
647
+ })
648
+
649
+ observeEvent(input$clear_bbox, {
650
+ rv$corner1 <- NULL
651
+ rv$corner2 <- NULL
652
+ rv$bbox <- NULL
653
+
654
+ leafletProxy("map_two_click") %>%
655
+ clearMarkers() %>%
656
+ clearShapes()
657
+ })
658
+
659
+ output$bbox_coords <- renderText({
660
+ req(input$region_mode == "bbox")
661
+
662
+ if (is.null(rv$bbox)) {
663
+ "No bounding box defined yet."
664
+ } else {
665
+ paste0(
666
+ "Bounding box:\n",
667
+ "SW corner: (", rv$bbox[1], ", ", rv$bbox[2], ")\n",
668
+ "NE corner: (", rv$bbox[3], ", ", rv$bbox[4], ")"
669
+ )
670
+ }
671
+ })
672
+
673
+ # Store final query results
674
+ result_data <- reactiveVal(NULL)
675
+
676
+ # Main "Run Query" button
677
+ observeEvent(input$run_query, {
678
+ req(input$years)
679
+ shiny::validate(need(length(input$years) > 0, "Please select at least one year."))
680
+
681
+ yrs <- as.numeric(input$years)
682
+
683
+ # Region logic
684
+ place_id_val <- NULL
685
+ swlat_val <- NULL
686
+ swlng_val <- NULL
687
+ nelat_val <- NULL
688
+ nelng_val <- NULL
689
+
690
+ if (input$region_mode == "place") {
691
+ place_id_val <- input$place_id
692
+ } else {
693
+ shiny::validate(need(!is.null(rv$bbox), "Please click twice on the map to define bounding box."))
694
+ swlat_val <- rv$bbox[1]
695
+ swlng_val <- rv$bbox[2]
696
+ nelat_val <- rv$bbox[3]
697
+ nelng_val <- rv$bbox[4]
698
+ }
699
+
700
+ # Query type logic
701
+ iconic_val <- NULL
702
+ species_val <- NULL
703
+ if (input$query_type == "iconic") {
704
+ iconic_val <- input$iconic_taxon
705
+ } else {
706
+ species_val <- input$species_name
707
+ }
708
+
709
+ # Fetch data
710
+ withProgress(message = 'Fetching data from iNaturalist (Weekly)...', value = 0, {
711
+ incProgress(0.4)
712
+
713
+ query_res <- getDeadVertebrates_weeklyLoop(
714
+ years = yrs,
715
+ place_id = place_id_val,
716
+ swlat = swlat_val,
717
+ swlng = swlng_val,
718
+ nelat = nelat_val,
719
+ nelng = nelng_val,
720
+ iconic_taxa = iconic_val,
721
+ taxon_name = species_val
722
+ )
723
+
724
+ result_data(query_res)
725
+ incProgress(1)
726
+ })
727
+ })
728
+
729
+ # Output plots
730
+ output$dailyPlot <- renderPlot({
731
+ req(result_data())
732
+ result_data()$daily_plot
733
+ })
734
+
735
+ output$speciesPlot <- renderPlot({
736
+ req(result_data())
737
+ result_data()$top_species_plot
738
+ })
739
+
740
+ output$hotspotMap <- renderPlot({
741
+ req(result_data())
742
+ result_data()$map_hotspots_gg
743
+ })
744
+
745
+ # Output data table (all data)
746
+ output$dataTable <- DT::renderDataTable({
747
+ req(result_data())
748
+ # df <- result_data()$merged_df # top 90% subset
749
+ df <- result_data()$merged_df_all # all data
750
+
751
+ if (nrow(df) == 0) {
752
+ return(DT::datatable(
753
+ data.frame(Message = "No records found"),
754
+ options = list(pageLength = 20)
755
+ ))
756
+ }
757
+
758
+ df <- df %>%
759
+ mutate(
760
+ inat_link = paste0(
761
+ "<a href='https://www.inaturalist.org/observations/",
762
+ id, "' target='_blank'>", id, "</a>"
763
+ )
764
+ )
765
+
766
+ photo_col <- "taxon.default_photo.square_url"
767
+ if (photo_col %in% names(df)) {
768
+ df$image_thumb <- ifelse(
769
+ !is.na(df[[photo_col]]) & df[[photo_col]] != "",
770
+ paste0("<img src='", df[[photo_col]], "' width='50'/>"),
771
+ "No Img"
772
+ )
773
+ } else {
774
+ df$image_thumb <- "No Img"
775
+ }
776
+
777
+ show_cols <- c(
778
+ "inat_link", "image_thumb", "taxon.name", "created_at_details.date",
779
+ setdiff(names(df), c("inat_link", "image_thumb", "taxon.name", "created_at_details.date"))
780
+ )
781
+
782
+ DT::datatable(
783
+ df[, show_cols, drop = FALSE],
784
+ escape = FALSE,
785
+ options = list(pageLength = 20, autoWidth = TRUE)
786
+ )
787
+ })
788
+
789
+ # Download handler for ALL data only (hotspot CSV button removed)
790
+ output$downloadAll <- downloadHandler(
791
+ filename = function() {
792
+ paste0("inat_dead_ALL_", Sys.Date(), ".csv")
793
+ },
794
+ content = function(file) {
795
+ req(result_data())
796
+ readr::write_csv(result_data()$merged_df_all, file)
797
+ }
798
+ )
799
+ }
800
+
801
+ shinyApp(ui = ui, server = server)
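Because the script ends with a shinyApp() object, it can be launched directly; a sketch, assuming it is run from the repository root:

    shiny::runApp("poc/app_no90_2025.R")   # runApp() accepts a single .R file that ends in a Shiny app object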
poc/app_no90_dateslider_2025.R ADDED
@@ -0,0 +1,619 @@
1
+ ##################################################################
2
+ # Single R Script: Mortality Analysis + Shiny – Hexbin Map (All Data)
3
+ # Date‑range queries (no hard‑coded years)
4
+ ##################################################################
5
+
6
+ ### 1) Install/Load Required Packages ####
7
+ required_packages <- c(
8
+ "httr", "jsonlite", "tidyverse", "glue", "lubridate",
9
+ "wesanderson", "viridis", "hexbin", "shinycssloaders",
10
+ "DT", "maps", "mapdata", "leaflet", "leaflet.extras",
11
+ "shinythemes", "shiny"
12
+ )
13
+
14
+ installed_packages <- rownames(installed.packages())
15
+ for (pkg in required_packages) {
16
+ if (!pkg %in% installed_packages) {
17
+ install.packages(pkg, dependencies = TRUE)
18
+ }
19
+ }
20
+
21
+ library(httr)
22
+ library(jsonlite)
23
+ library(tidyverse)
24
+ library(glue)
25
+ library(lubridate)
26
+ library(wesanderson)
27
+ library(viridis)
28
+ library(hexbin)
29
+ library(shinycssloaders)
30
+ library(DT)
31
+ library(maps)
32
+ library(mapdata)
33
+ library(leaflet)
34
+ library(leaflet.extras)
35
+ library(shinythemes)
36
+ library(shiny)
37
+
38
+ ##################################################################
39
+ # 2) Mortality‑Analysis Functions
40
+ ##################################################################
41
+
42
+ # -- Base function to fetch dead observations for a specific date span --
43
+ fetch_dead_data_once <- function(
44
+ place_id = NULL,
45
+ swlat = NULL,
46
+ swlng = NULL,
47
+ nelat = NULL,
48
+ nelng = NULL,
49
+ start_date,
50
+ end_date,
51
+ iconic_taxa = NULL,
52
+ taxon_name = NULL,
53
+ conservation_status = NULL,
54
+ per_page = 200,
55
+ max_pages = 200
56
+ ) {
57
+ base_url <- "https://api.inaturalist.org/v1/observations"
58
+
59
+ q_parts <- list(
60
+ "term_id=17", # 'Dead' annotation
61
+ "term_value_id=19", # 'Dead' annotation
62
+ "verifiable=true", # only verifiable
63
+ glue("d1={start_date}"), # start date
64
+ glue("d2={end_date}"), # end date
65
+ "order=desc",
66
+ "order_by=created_at",
67
+ glue("per_page={per_page}")
68
+ )
69
+
70
+ if (!is.null(iconic_taxa) && iconic_taxa != "") {
71
+ q_parts <- c(q_parts, glue("iconic_taxa={iconic_taxa}"))
72
+ }
73
+ if (!is.null(taxon_name) && taxon_name != "") {
74
+ q_parts <- c(q_parts, glue("taxon_name={URLencode(taxon_name)}"))
75
+ }
76
+ if (!is.null(conservation_status) && conservation_status != "") {
77
+ if (!grepl("=", conservation_status, fixed = TRUE)) {
78
+ q_parts <- c(q_parts, glue("cs={URLencode(conservation_status)}"))
79
+ } else {
80
+ q_parts <- c(q_parts, conservation_status)
81
+ }
82
+ }
83
+ query_params <- paste(q_parts, collapse = "&")
84
+
85
+ # Build location portion of query
86
+ loc_part <- ""
87
+ if (!is.null(place_id)) {
88
+ loc_part <- glue("&place_id={place_id}")
89
+ } else if (!is.null(swlat) && !is.null(swlng) &&
90
+ !is.null(nelat) && !is.null(nelng)) {
91
+ loc_part <- glue("&nelat={nelat}&nelng={nelng}&swlat={swlat}&swlng={swlng}")
92
+ } else {
93
+ stop("Must provide either 'place_id' OR bounding box (swlat, swlng, nelat, nelng).")
94
+ }
95
+
96
+ observations_list <- list()
97
+ current_page <- 1
98
+
99
+ while (current_page <= max_pages) {
100
+ query_url <- paste0(
101
+ base_url, "?", query_params, "&page=", current_page, loc_part
102
+ )
103
+
104
+ message("Fetching page ", current_page,
105
+ " [", start_date, " to ", end_date, "]:\n", query_url)
106
+
107
+ resp <- GET(query_url)
108
+ if (http_error(resp)) {
109
+ warning("HTTP error on page ", current_page, ": ", status_code(resp))
110
+ break
111
+ }
112
+
113
+ parsed <- content(resp, as = "text", encoding = "UTF-8") %>%
114
+ fromJSON(flatten = TRUE)
115
+
116
+ if (length(parsed$results) == 0) {
117
+ message("No more results at page ", current_page)
118
+ break
119
+ }
120
+
121
+ obs_page_df <- as_tibble(parsed$results)
122
+ observations_list[[current_page]] <- obs_page_df
123
+
124
+ # reached the last page?
125
+ if (nrow(obs_page_df) < per_page) {
126
+ message("Reached last page of results at page ", current_page)
127
+ break
128
+ }
129
+
130
+ current_page <- current_page + 1
131
+ Sys.sleep(1.4) # Polite pause
132
+ }
133
+
134
+ observations_all <- bind_rows(observations_list)
135
+ return(observations_all)
136
+ }
137
+
138
+ # -- Fetch data weekly across an arbitrary date range --
139
+ getDeadVertebrates_dateRange <- function(
140
+ start_date, # Date object or string YYYY‑MM‑DD
141
+ end_date, # Date object or string YYYY‑MM‑DD
142
+ place_id = NULL,
143
+ swlat = NULL,
144
+ swlng = NULL,
145
+ nelat = NULL,
146
+ nelng = NULL,
147
+ iconic_taxa = NULL,
148
+ taxon_name = NULL,
149
+ conservation_status = NULL,
150
+ per_page = 500,
151
+ max_pages = 500,
152
+ outdir = NULL
153
+ ) {
154
+ # Ensure dates are Date objects
155
+ start_date <- as.Date(start_date)
156
+ end_date <- as.Date(end_date)
157
+ if (start_date > end_date) {
158
+ stop("start_date must be earlier than end_date")
159
+ }
160
+
161
+ week_starts <- seq.Date(start_date, end_date, by = "1 week")
162
+ all_weeks_list <- list()
163
+
164
+ for (i in seq_along(week_starts)) {
165
+ st <- week_starts[i]
166
+ ed <- if (i < length(week_starts)) week_starts[i + 1] - 1 else end_date
167
+
168
+ message("\n--- Querying ", st, " to ", ed, " ---")
169
+
170
+ df_week <- fetch_dead_data_once(
171
+ place_id = place_id,
172
+ swlat = swlat,
173
+ swlng = swlng,
174
+ nelat = nelat,
175
+ nelng = nelng,
176
+ start_date = st,
177
+ end_date = ed,
178
+ iconic_taxa = iconic_taxa,
179
+ taxon_name = taxon_name,
180
+ conservation_status = conservation_status,
181
+ per_page = per_page,
182
+ max_pages = max_pages
183
+ )
184
+ all_weeks_list[[i]] <- df_week
185
+ Sys.sleep(1.5)
186
+ }
187
+
188
+ merged_df_all <- bind_rows(all_weeks_list)
189
+
190
+ # If empty, return placeholders
191
+ if (!"created_at_details.date" %in% names(merged_df_all) ||
192
+ nrow(merged_df_all) == 0) {
193
+ placeholder_plot <- function(title) {
194
+ ggplot() + labs(title = title, x = NULL, y = NULL) + theme_void()
195
+ }
196
+ return(list(
197
+ merged_df_all = merged_df_all,
198
+ merged_df = merged_df_all,
199
+ daily_plot = placeholder_plot("No 'Dead' Observations Found"),
200
+ top_species_plot = placeholder_plot("No species data"),
201
+ map_hotspots_gg = placeholder_plot("No data for map"),
202
+ daily_90th_quant = NA
203
+ ))
204
+ }
205
+
206
+ # Add year column for plotting
207
+ merged_df_all <- merged_df_all %>%
208
+ mutate(obs_date = as.Date(observed_on),
209
+ Window = format(obs_date, "%Y"))
210
+
211
+ # Optionally write out
212
+ if (!is.null(outdir)) {
213
+ if (!dir.exists(outdir)) dir.create(outdir, recursive = TRUE)
214
+ readr::write_csv(merged_df_all, file.path(outdir, "merged_df_ALL_data.csv"))
215
+ }
216
+
217
+ # Aggregate counts by day
218
+ counts_by_day <- merged_df_all %>%
219
+ group_by(Window, obs_date) %>%
220
+ summarise(n = n_distinct(id), .groups = "drop")
221
+
222
+ y_max_value <- max(counts_by_day$n, na.rm = TRUE)
223
+ wes_colors <- wes_palette("Zissou1", length(unique(counts_by_day$Window)), type = "discrete")
224
+
225
+ wong_palette <- c(
226
+ "#000000", "#CC6666", "#D55E00", "#E69F00",
227
+ "#0072B2", "#56B4E9",
228
+ "#009E73", "#66CC99", "#999933", "#F0E442",
229
+ "#CC79A7"
230
+ )
231
+
232
+ daily_plot <- ggplot(counts_by_day, aes(x = obs_date, y = n, color = Window)) +
233
+ geom_line(size = 1.2) +
234
+ geom_point(size = 2) +
235
+ scale_x_date(date_labels = "%b %d", date_breaks = "1 month") +
236
+ scale_y_continuous(limits = c(0, y_max_value)) +
237
+ scale_color_manual(values = wong_palette) +
238
+ labs(
239
+ title = glue("Daily 'Dead' Observations ({start_date} to {end_date})"),
240
+ x = "Date",
241
+ y = "Number of Observations",
242
+ color = "Year"
243
+ ) +
244
+ theme_minimal(base_size = 14) +
245
+ theme(axis.text.x = element_text(angle = 45, hjust = 1))
246
+
247
+ # Top species plot
248
+ if ("taxon.name" %in% names(merged_df_all)) {
249
+ species_counts <- merged_df_all %>%
250
+ filter(!is.na(taxon.name)) %>%
251
+ group_by(Window, taxon.name) %>%
252
+ summarise(dead_count = n(), .groups = "drop")
253
+
254
+ top_species_overall <- species_counts %>%
255
+ group_by(taxon.name) %>%
256
+ summarise(total_dead = sum(dead_count)) %>%
257
+ arrange(desc(total_dead)) %>%
258
+ slice_head(n = 20)
259
+
260
+ species_top20 <- species_counts %>%
261
+ filter(taxon.name %in% top_species_overall$taxon.name)
262
+
263
+ top_species_plot <- ggplot(species_top20, aes(
264
+ x = reorder(taxon.name, -dead_count),
265
+ y = dead_count,
266
+ fill= Window
267
+ )) +
268
+ geom_col(position = position_dodge(width = 0.7)) +
269
+ coord_flip() +
270
+ labs(
271
+ title = "Top 20 Species with 'Dead' Observations",
272
+ x = "Species",
273
+ y = "Number of Dead Observations",
274
+ fill = "Year"
275
+ ) +
276
+ theme_minimal(base_size = 14) +
277
+ scale_fill_manual(values = wong_palette)
278
+ } else {
279
+ top_species_plot <- ggplot() +
280
+ labs(title = "No 'taxon.name' column found", x = NULL, y = NULL) +
281
+ theme_void()
282
+ }
283
+
284
+ # Identify high‑mortality days (>= 90th percentile) – kept but no longer downloaded separately
285
+ daily_quantile <- quantile(counts_by_day$n, probs = 0.90, na.rm = TRUE)
286
+ high_mortality_days <- counts_by_day %>%
287
+ filter(n >= daily_quantile) %>%
288
+ pull(obs_date)
289
+
290
+ merged_high <- merged_df_all %>%
291
+ filter(obs_date %in% high_mortality_days)
292
+
293
+ # Hexbin map for ALL data
294
+ if ("location" %in% names(merged_df_all)) {
295
+ location_df_all <- merged_df_all %>%
296
+ filter(!is.na(location) & location != "") %>%
297
+ separate(location, into = c("lat_str", "lon_str"), sep = ",", remove = FALSE) %>%
298
+ mutate(latitude = as.numeric(lat_str), longitude = as.numeric(lon_str))
299
+
300
+
301
+ if (nrow(location_df_all) == 0) {
302
+ map_hotspots_gg <- ggplot() +
303
+ labs(title = "No spatial data available for map") +
304
+ theme_void()
305
+ } else {
306
+ # Calculate map limits from the data actually returned
307
+ x_limits <- range(location_df_all$longitude, na.rm = TRUE)
308
+ y_limits <- range(location_df_all$latitude, na.rm = TRUE)
309
+ map_hotspots_gg <- ggplot() +
310
+ borders("world", fill = "gray80", colour = "white") +
311
+ stat_bin_hex(
312
+ data = location_df_all,
313
+ aes(x = longitude, y = latitude),
314
+ bins = 500,
315
+ color = "black",
316
+ alpha = 0.8
317
+ ) +
318
+ scale_fill_viridis_c(option = "plasma", name = "Observation Count") +
319
+ coord_quickmap(xlim = x_limits, ylim = y_limits, expand = TRUE) +
320
+ labs(
321
+ title = glue("'Dead' Wildlife Hexbin Map ({start_date} to {end_date})"),
322
+ x = "Longitude",
323
+ y = "Latitude"
324
+ ) +
325
+ theme_classic(base_size = 14) +
326
+ theme(
327
+ axis.text = element_text(face = "bold", size = 14, colour = "black"),
328
+ axis.title = element_text(face = "bold", size = 16, colour = "black")
329
+ )
330
+ }
331
+ } else {
332
+ map_hotspots_gg <- ggplot() +
333
+ labs(title = "No 'location' column for map") +
334
+ theme_void()
335
+ }
336
+
337
+
338
+ # if (nrow(location_df_all) == 0) {
339
+ # map_hotspots_gg <- ggplot() +
340
+ # labs(title = "No spatial data available for map") +
341
+ # theme_void()
342
+ # } else {
343
+ # map_hotspots_gg <- ggplot() +
344
+ # borders("world", fill = "gray80", colour = "white") +
345
+ # stat_bin_hex(
346
+ # data = location_df_all,
347
+ # aes(x = longitude, y = latitude),
348
+ # bins = 500,
349
+ # color = "black",
350
+ # alpha = 0.8
351
+ # ) +
352
+ # coord_quickmap(xlim=bbox[c(1,3)], ylim=bbox[c(2,4)], expand=TRUE)+
353
+ # scale_fill_viridis_c(option = "plasma", name = "Observation Count") +
354
+ # coord_quickmap(expand = TRUE) +
355
+ # labs(
356
+ # title = glue("'Dead' Wildlife Hexbin Map ({start_date} to {end_date})"),
357
+ # x = "Longitude",
358
+ # y = "Latitude"
359
+ # ) +
360
+ # theme_classic(base_size = 14) +
361
+ # theme(
362
+ # axis.text = element_text(face = "bold", size = 14, colour = "black"),
363
+ # axis.title = element_text(face = "bold", size = 16, colour = "black")
364
+ # )
365
+ # }
366
+ # } else {
367
+ # map_hotspots_gg <- ggplot() + labs(title = "No 'location' column for map") + theme_void()
368
+ # }
369
+
370
+ # Save optional outputs
371
+ if (!is.null(outdir)) {
372
+ readr::write_csv(merged_high, file.path(outdir, "merged_df_top90.csv"))
373
+ ggsave(file.path(outdir, "daily_plot.png"), daily_plot, width = 8, height = 5, dpi = 300)
374
+ ggsave(file.path(outdir, "top_species_plot.png"), top_species_plot, width = 7, height = 7, dpi = 300)
375
+ ggsave(file.path(outdir, "hexbin_map.png"), map_hotspots_gg, width = 8, height = 5, dpi = 300)
376
+ }
377
+
378
+ return(list(
379
+ merged_df_all = merged_df_all,
380
+ merged_df = merged_high, # kept for potential future use
381
+ daily_plot = daily_plot,
382
+ top_species_plot = top_species_plot,
383
+ map_hotspots_gg = map_hotspots_gg,
384
+ daily_90th_quant = daily_quantile
385
+ ))
386
+ }
387
+
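A minimal sketch of calling getDeadVertebrates_dateRange() defined above; the 90-day window and the rough California bounding box are illustrative values only:

    res <- getDeadVertebrates_dateRange(
      start_date = Sys.Date() - 90,
      end_date   = Sys.Date(),
      swlat = 32.5, swlng = -124.5,
      nelat = 42.0, nelng = -114.0,
      iconic_taxa = "Reptilia"
    )
    head(res$merged_df_all$observed_on)   # observation dates of the fetched records (when any were returned)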
388
+ ##################################################################
389
+ # 3) Shiny App: UI + Server
390
+ ##################################################################
391
+
392
+ ui <- fluidPage(
393
+ theme = shinytheme("cosmo"),
394
+
395
+ # -- Logo and Title --
396
+ fluidRow(
397
+ column(width = 2, tags$img(src = "www/all_logos.png", height = "400px")),
398
+ column(width = 10, titlePanel("Dead Wildlife Observations from iNaturalist"))
399
+ ),
400
+ hr(),
401
+
402
+ sidebarLayout(
403
+ sidebarPanel(
404
+ tabsetPanel(id = "sidebar_tabs",
405
+
406
+ # == Query Panel ==
407
+ tabPanel("Query",
408
+ br(),
409
+ radioButtons("region_mode", "Region Input Mode:",
410
+ choices = c("Enter Numeric place_id" = "place",
411
+ "Two‑Click Bounding Box" = "bbox"),
412
+ selected = "bbox"),
413
+
414
+ # place_id input
415
+ conditionalPanel(
416
+ condition = "input.region_mode == 'place'",
417
+ numericInput("place_id", "Numeric place_id (e.g. 1 = USA, 14 = California)",
418
+ value = 1, min = 1, max = 999999, step = 1)
419
+ ),
420
+
421
+ # Bounding‑box selector
422
+ conditionalPanel(
423
+ condition = "input.region_mode == 'bbox'",
424
+ helpText("Left‑click once for the SW corner, once more for the NE corner."),
425
+ leafletOutput("map_two_click", height = "300px"),
426
+ br(), actionButton("clear_bbox", "Clear bounding box"), br(), br(),
427
+ verbatimTextOutput("bbox_coords")
428
+ ),
429
+
430
+ # Date‑range selector (replaces year toggle)
431
+ dateRangeInput("date_range", "Select Date Range:",
432
+ start = Sys.Date() - 365,
433
+ end = Sys.Date(),
434
+ min = "2010-01-01",
435
+ max = Sys.Date()),
436
+
437
+ # Query by iconic class or species
438
+ radioButtons("query_type", "Query By:",
439
+ choices = c("Taxon Class" = "iconic",
440
+ "Exact Species Name" = "species")),
441
+ conditionalPanel(
442
+ condition = "input.query_type == 'iconic'",
443
+ selectInput("iconic_taxon", "Select Taxon Class:",
444
+ choices = c("Aves", "Mammalia", "Reptilia", "Amphibia",
445
+ "Actinopterygii", "Mollusca", "Animalia"),
446
+ selected = "Aves")
447
+ ),
448
+ conditionalPanel(
449
+ condition = "input.query_type == 'species'",
450
+ textInput("species_name", "Enter exact species name (e.g. Puma concolor)", "")
451
+ ),
452
+
453
+ actionButton("run_query", "Run Query", icon = icon("play")),
454
+ hr(),
455
+ downloadButton("downloadAll", "Download ALL Data CSV", icon = icon("download"))
456
+ ),
457
+
458
+ # == About Panel ==
459
+ tabPanel("About",
460
+ br(),
461
+ p("This Shiny application was created by Diego Ellis-Soto (UC Berkeley) with collaborators Liam U. Taylor (Bowdoin College), Lizzy Edson (California Academy of Sciences), Christopher J. Schell (UC Berkeley), Carl Boettiger (UC Berkeley), and Rebecca Johnson (California Academy of Sciences). It retrieves iNaturalist observations annotated as 'Dead' wildlife in real time for research and conservation analyses."),
462
+ p("Participatory science platforms like iNaturalist empower community members to document biodiversity. Observations of dead wildlife help detect mortality events, disease spread, and other threats that can be critical for conservation."),
463
+ p("Please contribute responsibly and ethically. Data on wildlife mortalities can inform management decisions and raise public awareness.")
464
+ ),
465
+
466
+ # == How to Use Panel ==
467
+ tabPanel("How to Use",
468
+ br(),
469
+ p("1. Choose a region: enter a numeric place_id **or** define a custom bounding box by clicking twice on the map."),
470
+ p("2. Pick a date range for your query. The app will fetch data week‑by‑week within that range."),
471
+ p("3. Select either an iconic taxon class (e.g. Aves) **or** an exact species name."),
472
+ p("4. Click **Run Query**. Visualisations and the full data table will update. You can download the complete dataset using the 'Download ALL Data CSV' button.")
473
+ )
474
+ )
475
+ ),
476
+
477
+ mainPanel(
478
+ tabsetPanel(
479
+ tabPanel("Daily Time Series", withSpinner(plotOutput("dailyPlot"), type = 6)),
480
+ tabPanel("Top Species", withSpinner(plotOutput("speciesPlot"), type = 6)),
481
+ tabPanel("Hexbin Map (All Data)", withSpinner(plotOutput("hotspotMap"), type = 6)),
482
+ tabPanel("All Data Table", withSpinner(DT::dataTableOutput("dataTable"), type = 6))
483
+ )
484
+ )
485
+ )
486
+ )
487
+
488
+ ##################################################################
489
+ # Server
490
+ ##################################################################
491
+
492
+ server <- function(input, output, session) {
493
+
494
+ # Reactive values for bounding box clicks
495
+ rv <- reactiveValues(corner1 = NULL, corner2 = NULL, bbox = NULL)
496
+
497
+ # Initialise leaflet map
498
+ output$map_two_click <- renderLeaflet({
499
+ leaflet() %>% addTiles() %>% setView(lng = -100, lat = 40, zoom = 4)
500
+ })
501
+
502
+ # Handle map clicks
503
+ observeEvent(input$map_two_click_click, {
504
+ req(input$region_mode == "bbox")
505
+ click <- input$map_two_click_click
506
+ if (is.null(click)) return()
507
+
508
+ lat_clicked <- click$lat; lng_clicked <- click$lng
509
+ if (is.null(rv$corner1)) {
510
+ rv$corner1 <- c(lat_clicked, lng_clicked)
511
+ showNotification("First corner set. Now click for the opposite corner.")
512
+ leafletProxy("map_two_click") %>% clearMarkers() %>%
513
+ addMarkers(lng = lng_clicked, lat = lat_clicked, popup = "Corner 1")
514
+ rv$corner2 <- NULL; rv$bbox <- NULL
515
+ } else {
516
+ rv$corner2 <- c(lat_clicked, lng_clicked)
517
+ lat_min <- min(rv$corner1[1], rv$corner2[1]); lat_max <- max(rv$corner1[1], rv$corner2[1])
518
+ lng_min <- min(rv$corner1[2], rv$corner2[2]); lng_max <- max(rv$corner1[2], rv$corner2[2])
519
+ rv$bbox <- c(lat_min, lng_min, lat_max, lng_max)
520
+
521
+ showNotification("Second corner set. Bounding box defined!", duration = 2)
522
+
523
+ leafletProxy("map_two_click") %>% clearMarkers() %>%
524
+ addMarkers(lng = rv$corner1[2], lat = rv$corner1[1], popup = "Corner 1") %>%
525
+ addMarkers(lng = rv$corner2[2], lat = rv$corner2[1], popup = "Corner 2") %>%
526
+ clearShapes() %>%
527
+ addRectangles(lng1 = lng_min, lat1 = lat_min, lng2 = lng_max, lat2 = lat_max,
528
+ fillColor = "red", fillOpacity = 0.2, color = "red")
529
+ }
530
+ })
531
+
532
+ observeEvent(input$clear_bbox, {
533
+ rv$corner1 <- rv$corner2 <- rv$bbox <- NULL
534
+ leafletProxy("map_two_click") %>% clearMarkers() %>% clearShapes()
535
+ })
536
+
537
+ output$bbox_coords <- renderText({
538
+ req(input$region_mode == "bbox")
539
+ if (is.null(rv$bbox)) "No bounding box defined yet." else paste0(
540
+ "Bounding box:\nSW: (", rv$bbox[1], ", ", rv$bbox[2], ")\nNE: (", rv$bbox[3], ", ", rv$bbox[4], ")")
541
+ })
542
+
543
+ # Store query results
544
+ result_data <- reactiveVal(NULL)
545
+
546
+ # Run query
547
+ observeEvent(input$run_query, {
548
+ req(input$date_range)
549
+ start_date <- as.Date(input$date_range[1])
550
+ end_date <- as.Date(input$date_range[2])
551
+
552
+ # Region params
553
+ place_id_val <- NULL; swlat_val <- NULL; swlng_val <- NULL; nelat_val <- NULL; nelng_val <- NULL
554
+ if (input$region_mode == "place") {
555
+ place_id_val <- input$place_id
556
+ } else {
557
+ validate(need(!is.null(rv$bbox), "Please define a bounding box by clicking twice on the map."))
558
+ swlat_val <- rv$bbox[1]; swlng_val <- rv$bbox[2]; nelat_val <- rv$bbox[3]; nelng_val <- rv$bbox[4]
559
+ }
560
+
561
+ # Taxon params
562
+ iconic_val <- if (input$query_type == "iconic") input$iconic_taxon else NULL
563
+ species_val <- if (input$query_type == "species") input$species_name else NULL
564
+
565
+ withProgress(message = 'Fetching data from iNaturalist (weekly)…', value = 0, {
566
+ incProgress(0.4)
567
+ query_res <- getDeadVertebrates_dateRange(
568
+ start_date = start_date,
569
+ end_date = end_date,
570
+ place_id = place_id_val,
571
+ swlat = swlat_val,
572
+ swlng = swlng_val,
573
+ nelat = nelat_val,
574
+ nelng = nelng_val,
575
+ iconic_taxa = iconic_val,
576
+ taxon_name = species_val
577
+ )
578
+ result_data(query_res)
579
+ incProgress(1)
580
+ })
581
+ })
582
+
583
+ # Render plots
584
+ output$dailyPlot <- renderPlot({ req(result_data()); result_data()$daily_plot })
585
+ output$speciesPlot <- renderPlot({ req(result_data()); result_data()$top_species_plot })
586
+ output$hotspotMap <- renderPlot({ req(result_data()); result_data()$map_hotspots_gg })
587
+
588
+ # Data table (all data)
589
+ output$dataTable <- DT::renderDataTable({
590
+ req(result_data())
591
+ df <- result_data()$merged_df_all
592
+ if (nrow(df) == 0) {
593
+ return(DT::datatable(data.frame(Message = "No records found"), options = list(pageLength = 20)))
594
+ }
595
+
596
+ df <- df %>% mutate(inat_link = paste0("<a href='https://www.inaturalist.org/observations/", id, "' target='_blank'>", id, "</a>"))
597
+ photo_col <- "taxon.default_photo.square_url"
598
+ if (photo_col %in% names(df)) {
599
+ df$image_thumb <- ifelse(!is.na(df[[photo_col]]) & df[[photo_col]] != "",
600
+ paste0("<img src='", df[[photo_col]], "' width='50'/>"), "No Img")
601
+ } else {
602
+ df$image_thumb <- "No Img"
603
+ }
604
+
605
+ show_cols <- c("inat_link", "image_thumb", "taxon.name", "created_at_details.date",
606
+ setdiff(names(df), c("inat_link", "image_thumb", "taxon.name", "created_at_details.date")))
607
+
608
+ DT::datatable(df[, show_cols, drop = FALSE], escape = FALSE,
609
+ options = list(pageLength = 20, autoWidth = TRUE))
610
+ })
611
+
612
+ # Download handler – ALL data only
613
+ output$downloadAll <- downloadHandler(
614
+ filename = function() paste0("inat_dead_ALL_", Sys.Date(), ".csv"),
615
+ content = function(file) { req(result_data()); readr::write_csv(result_data()$merged_df_all, file) }
616
+ )
617
+ }
618
+
619
+ shinyApp(ui = ui, server = server)
poc/app_no90_dateslider_progress_bar_2025.R ADDED
@@ -0,0 +1,595 @@
1
+ ##################################################################
2
+ # Single R Script: Mortality Analysis + Shiny – Hexbin Map (All Data)
3
+ # Date-range queries (no hard-coded years)
4
+ ##################################################################
5
+
6
+ ### 1) Install/Load Required Packages ####
7
+ required_packages <- c(
8
+ "httr", "jsonlite", "tidyverse", "glue", "lubridate",
9
+ "wesanderson", "viridis", "hexbin", "shinycssloaders",
10
+ "DT", "maps", "mapdata", "leaflet", "leaflet.extras",
11
+ "shinythemes", "shiny"
12
+ )
13
+
14
+ installed_packages <- rownames(installed.packages())
15
+ for (pkg in required_packages) {
16
+ if (!pkg %in% installed_packages) {
17
+ install.packages(pkg, dependencies = TRUE)
18
+ }
19
+ }
20
+
21
+ library(httr)
22
+ library(jsonlite)
23
+ library(tidyverse)
24
+ library(glue)
25
+ library(lubridate)
26
+ library(wesanderson)
27
+ library(viridis)
28
+ library(hexbin)
29
+ library(shinycssloaders)
30
+ library(DT)
31
+ library(maps)
32
+ library(mapdata)
33
+ library(leaflet)
34
+ library(leaflet.extras)
35
+ library(shinythemes)
36
+ library(shiny)
37
+
38
+ ##################################################################
39
+ # 2) Mortality-Analysis Functions
40
+ ##################################################################
41
+
42
+ # -- Base function to fetch dead observations for a specific date span --
43
+ fetch_dead_data_once <- function(
44
+ place_id = NULL,
45
+ swlat = NULL,
46
+ swlng = NULL,
47
+ nelat = NULL,
48
+ nelng = NULL,
49
+ start_date,
50
+ end_date,
51
+ iconic_taxa = NULL,
52
+ taxon_name = NULL,
53
+ conservation_status = NULL,
54
+ per_page = 200,
55
+ max_pages = 200
56
+ ) {
57
+ base_url <- "https://api.inaturalist.org/v1/observations"
58
+
59
+ q_parts <- list(
60
+ "term_id=17", # 'Dead' annotation
61
+ "term_value_id=19", # 'Dead' annotation
62
+ "verifiable=true", # only verifiable
63
+ glue("d1={start_date}"), # start date
64
+ glue("d2={end_date}"), # end date
65
+ "order=desc",
66
+ "order_by=created_at",
67
+ glue("per_page={per_page}")
68
+ )
69
+
70
+ if (!is.null(iconic_taxa) && iconic_taxa != "") {
71
+ q_parts <- c(q_parts, glue("iconic_taxa={iconic_taxa}"))
72
+ }
73
+ if (!is.null(taxon_name) && taxon_name != "") {
74
+ q_parts <- c(q_parts, glue("taxon_name={URLencode(taxon_name)}"))
75
+ }
76
+ if (!is.null(conservation_status) && conservation_status != "") {
77
+ if (!grepl("=", conservation_status, fixed = TRUE)) {
78
+ q_parts <- c(q_parts, glue("cs={URLencode(conservation_status)}"))
79
+ } else {
80
+ q_parts <- c(q_parts, conservation_status)
81
+ }
82
+ }
83
+ query_params <- paste(q_parts, collapse = "&")
84
+
85
+ # Build location portion of query
86
+ loc_part <- ""
87
+ if (!is.null(place_id)) {
88
+ loc_part <- glue("&place_id={place_id}")
89
+ } else if (!is.null(swlat) && !is.null(swlng) &&
90
+ !is.null(nelat) && !is.null(nelng)) {
91
+ loc_part <- glue("&nelat={nelat}&nelng={nelng}&swlat={swlat}&swlng={swlng}")
92
+ } else {
93
+ stop("Must provide either 'place_id' OR bounding box (swlat, swlng, nelat, nelng).")
94
+ }
95
+
96
+ observations_list <- list()
97
+ current_page <- 1
98
+
99
+ while (current_page <= max_pages) {
100
+ query_url <- paste0(
101
+ base_url, "?", query_params, "&page=", current_page, loc_part
102
+ )
103
+
104
+ message("Fetching page ", current_page,
105
+ " [", start_date, " to ", end_date, "]:\n", query_url)
106
+
107
+ resp <- GET(query_url)
108
+ if (http_error(resp)) {
109
+ warning("HTTP error on page ", current_page, ": ", status_code(resp))
110
+ break
111
+ }
112
+
113
+ parsed <- content(resp, as = "text", encoding = "UTF-8") %>%
114
+ fromJSON(flatten = TRUE)
115
+
116
+ if (length(parsed$results) == 0) {
117
+ message("No more results at page ", current_page)
118
+ break
119
+ }
120
+
121
+ obs_page_df <- as_tibble(parsed$results)
122
+ observations_list[[current_page]] <- obs_page_df
123
+
124
+ # reached the last page?
125
+ if (nrow(obs_page_df) < per_page) {
126
+ message("Reached last page of results at page ", current_page)
127
+ break
128
+ }
129
+
130
+ current_page <- current_page + 1
131
+ Sys.sleep(1.4) # Polite pause
132
+ }
133
+
134
+ observations_all <- bind_rows(observations_list)
135
+ return(observations_all)
136
+ }
137
+
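+ # Example (not run): a minimal sketch of calling fetch_dead_data_once directly for a single
+ # week over an illustrative San Francisco Bay Area bounding box (coordinates are assumptions):
+ # bay_week <- fetch_dead_data_once(
+ #   swlat = 37.2, swlng = -122.6, nelat = 38.0, nelng = -121.7,
+ #   start_date = "2024-06-01", end_date = "2024-06-07",
+ #   iconic_taxa = "Aves"
+ # )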
138
+ # -- Fetch data weekly across an arbitrary date range --
139
+ getDeadVertebrates_dateRange <- function(
140
+ start_date, # Date object or string YYYY-MM-DD
141
+ end_date, # Date object or string YYYY-MM-DD
142
+ place_id = NULL,
143
+ swlat = NULL,
144
+ swlng = NULL,
145
+ nelat = NULL,
146
+ nelng = NULL,
147
+ iconic_taxa = NULL,
148
+ taxon_name = NULL,
149
+ conservation_status = NULL,
150
+ per_page = 500,
151
+ max_pages = 500,
152
+ outdir = NULL
153
+ ) {
154
+ # Ensure dates are Date objects
155
+ start_date <- as.Date(start_date)
156
+ end_date <- as.Date(end_date)
157
+ if (start_date > end_date) {
158
+ stop("start_date must be earlier than end_date")
159
+ }
160
+
161
+ week_starts <- seq.Date(start_date, end_date, by = "1 week")
162
+ all_weeks_list <- list()
163
+
164
+ for (i in seq_along(week_starts)) {
165
+ st <- week_starts[i]
166
+ ed <- if (i < length(week_starts)) week_starts[i + 1] - 1 else end_date
167
+
168
+ message("\n--- Querying ", st, " to ", ed, " ---")
169
+
170
+ # ── NEW: update Shiny progress bar ──────────────────────────────
171
+ try(
172
+ incProgress(
173
+ amount = 1 / length(week_starts),
174
+ detail = glue("Week {i} of {length(week_starts)}: {st} to {ed}")
175
+ ),
176
+ silent = TRUE
177
+ )
178
+ # ────────────────────────────────────────────────────────────────
179
+
180
+ df_week <- fetch_dead_data_once(
181
+ place_id = place_id,
182
+ swlat = swlat,
183
+ swlng = swlng,
184
+ nelat = nelat,
185
+ nelng = nelng,
186
+ start_date = st,
187
+ end_date = ed,
188
+ iconic_taxa = iconic_taxa,
189
+ taxon_name = taxon_name,
190
+ conservation_status = conservation_status,
191
+ per_page = per_page,
192
+ max_pages = max_pages
193
+ )
194
+ all_weeks_list[[i]] <- df_week
195
+ Sys.sleep(1.4)
196
+ }
197
+
198
+ merged_df_all <- bind_rows(all_weeks_list)
199
+
200
+ # If empty, return placeholders
201
+ if (!"created_at_details.date" %in% names(merged_df_all) ||
202
+ nrow(merged_df_all) == 0) {
203
+ placeholder_plot <- function(title) {
204
+ ggplot() + labs(title = title, x = NULL, y = NULL) + theme_void()
205
+ }
206
+ return(list(
207
+ merged_df_all = merged_df_all,
208
+ merged_df = merged_df_all,
209
+ daily_plot = placeholder_plot("No 'Dead' Observations Found"),
210
+ top_species_plot = placeholder_plot("No species data"),
211
+ map_hotspots_gg = placeholder_plot("No data for map"),
212
+ daily_90th_quant = NA
213
+ ))
214
+ }
215
+
216
+ # Add year column for plotting
217
+ merged_df_all <- merged_df_all %>%
218
+ mutate(obs_date = as.Date(observed_on),
219
+ Window = format(obs_date, "%Y"))
220
+
221
+ # Optionally write out
222
+ if (!is.null(outdir)) {
223
+ if (!dir.exists(outdir)) dir.create(outdir, recursive = TRUE)
224
+ readr::write_csv(merged_df_all, file.path(outdir, "merged_df_ALL_data.csv"))
225
+ }
226
+
227
+ # Aggregate counts by day
228
+ counts_by_day <- merged_df_all %>%
229
+ group_by(Window, obs_date) %>%
230
+ summarise(n = n_distinct(id), .groups = "drop")
231
+
232
+ y_max_value <- max(counts_by_day$n, na.rm = TRUE)
233
+ # wes_colors <- wes_palette("Zissou1", length(unique(counts_by_day$Window)), type = "discrete")
234
+
235
+ # wong_palette <- c(
236
+ # "#000000", "#CC6666", "#D55E00", "#E69F00",
237
+ # "#0072B2", "#56B4E9",
238
+ # "#009E73", "#66CC99", "#999933", "#F0E442",
239
+ # "#CC79A7"
240
+ # )
241
+
242
+ daily_plot <- ggplot(counts_by_day, aes(x = obs_date, y = n, color = Window)) +
243
+ geom_line(size = 1.2) +
244
+ geom_point(size = 2) +
245
+ scale_x_date(date_labels = "%b %d", date_breaks = "1 month") +
246
+ scale_y_continuous(limits = c(0, y_max_value)) +
247
+ # scale_color_manual(values = wong_palette) +
248
+ labs(
249
+ title = glue("Daily 'Dead' Observations ({start_date} to {end_date})"),
250
+ x = "Date",
251
+ y = "Number of Observations",
252
+ color = "Year"
253
+ ) +
254
+ theme_minimal(base_size = 14) +
255
+ theme(axis.text.x = element_text(angle = 45, hjust = 1))
256
+
257
+ # Top species plot
258
+ if ("taxon.name" %in% names(merged_df_all)) {
259
+ species_counts <- merged_df_all %>%
260
+ filter(!is.na(taxon.name)) %>%
261
+ group_by(Window, taxon.name) %>%
262
+ summarise(dead_count = n(), .groups = "drop")
263
+
264
+ top_species_overall <- species_counts %>%
265
+ group_by(taxon.name) %>%
266
+ summarise(total_dead = sum(dead_count)) %>%
267
+ arrange(desc(total_dead)) %>%
268
+ slice_head(n = 20)
269
+
270
+ species_top20 <- species_counts %>%
271
+ filter(taxon.name %in% top_species_overall$taxon.name)
272
+
273
+ top_species_plot <- ggplot(species_top20, aes(
274
+ x = reorder(taxon.name, -dead_count),
275
+ y = dead_count,
276
+ fill= Window
277
+ )) +
278
+ geom_col(position = position_dodge(width = 0.7)) +
279
+ coord_flip() +
280
+ labs(
281
+ title = "Top 20 Species with 'Dead' Observations",
282
+ x = "Species",
283
+ y = "Number of Dead Observations",
284
+ fill = "Year"
285
+ ) +
286
+ theme_minimal(base_size = 14)
287
+ #+
288
+ # scale_fill_manual(values = wong_palette)
289
+ } else {
290
+ top_species_plot <- ggplot() +
291
+ labs(title = "No 'taxon.name' column found", x = NULL, y = NULL) +
292
+ theme_void()
293
+ }
294
+
295
+ # Identify high-mortality days (>= 90th percentile)
296
+ daily_quantile <- quantile(counts_by_day$n, probs = 0.90, na.rm = TRUE)
297
+ high_mortality_days <- counts_by_day %>%
298
+ filter(n >= daily_quantile) %>%
299
+ pull(obs_date)
300
+
301
+ merged_high <- merged_df_all %>%
302
+ filter(obs_date %in% high_mortality_days)
303
+
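+ # Illustration: if counts_by_day$n were 1:10, quantile(1:10, 0.90) returns 9.1,
+ # so only the day with 10 observations would be retained in merged_high.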
304
+ # Hexbin map for ALL data
305
+ if ("location" %in% names(merged_df_all)) {
306
+ location_df_all <- merged_df_all %>%
307
+ filter(!is.na(location) & location != "") %>%
308
+ separate(location, into = c("lat_str", "lon_str"), sep = ",", remove = FALSE) %>%
309
+ mutate(latitude = as.numeric(lat_str), longitude = as.numeric(lon_str))
310
+
311
+ if (nrow(location_df_all) == 0) {
312
+ map_hotspots_gg <- ggplot() +
313
+ labs(title = "No spatial data available for map") +
314
+ theme_void()
315
+ } else {
316
+ # Calculate map limits from the data actually returned
317
+ x_limits <- range(location_df_all$longitude, na.rm = TRUE)
318
+ y_limits <- range(location_df_all$latitude, na.rm = TRUE)
319
+
320
+ map_hotspots_gg <- ggplot() +
321
+ borders("world", fill = "gray80", colour = "white") +
322
+ stat_bin_hex(
323
+ data = location_df_all,
324
+ aes(x = longitude, y = latitude),
325
+ bins = 500, # number of hexagonal bins; adjust to trade off spatial resolution vs. smoothing
326
+ color = "black",
327
+ alpha = 0.8
328
+ ) +
329
+ scale_fill_viridis_c(option = "plasma", name = "Observation Count") +
330
+ coord_quickmap(xlim = x_limits, ylim = y_limits, expand = TRUE) +
331
+ labs(
332
+ title = glue("'Dead' Wildlife Hexbin Map ({start_date} to {end_date})"),
333
+ x = "Longitude",
334
+ y = "Latitude"
335
+ ) +
336
+ theme_classic(base_size = 14) +
337
+ theme(
338
+ axis.text = element_text(face = "bold", size = 14, colour = "black"),
339
+ axis.title = element_text(face = "bold", size = 16, colour = "black")
340
+ )
341
+ }
342
+ } else {
343
+ map_hotspots_gg <- ggplot() +
344
+ labs(title = "No 'location' column for map") +
345
+ theme_void()
346
+ }
347
+
348
+ # Save optional outputs
349
+ if (!is.null(outdir)) {
350
+ readr::write_csv(merged_high, file.path(outdir, "merged_df_top90.csv"))
351
+ ggsave(file.path(outdir, "daily_plot.png"), daily_plot, width = 8, height = 5, dpi = 300)
352
+ ggsave(file.path(outdir, "top_species_plot.png"), top_species_plot, width = 7, height = 7, dpi = 300)
353
+ ggsave(file.path(outdir, "hexbin_map.png"), map_hotspots_gg, width = 8, height = 5, dpi = 300)
354
+ }
355
+
356
+ return(list(
357
+ merged_df_all = merged_df_all,
358
+ merged_df = merged_high, # kept for potential future use
359
+ daily_plot = daily_plot,
360
+ top_species_plot = top_species_plot,
361
+ map_hotspots_gg = map_hotspots_gg,
362
+ daily_90th_quant = daily_quantile
363
+ ))
364
+ }
365
+
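+ # Example (not run): a minimal sketch of running the date-range pipeline outside Shiny.
+ # place_id 14 corresponds to California; the taxon and output directory are assumptions.
+ # The incProgress() call above is wrapped in try(), so this is safe without a Shiny session.
+ # res <- getDeadVertebrates_dateRange(
+ #   start_date = Sys.Date() - 90,
+ #   end_date   = Sys.Date(),
+ #   place_id   = 14,
+ #   iconic_taxa = "Mammalia",
+ #   outdir     = "inat_dead_output"
+ # )
+ # res$daily_plot
+ # res$map_hotspots_gg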
366
+ ##################################################################
367
+ # 3) Shiny App: UI + Server
368
+ ##################################################################
369
+
370
+ ui <- fluidPage(
371
+ theme = shinytheme("cosmo"),
372
+
373
+ # -- Logo and Title --
374
+ fluidRow(
375
+ column(width = 2, tags$img(src = "all_logos.png", height = "400px")), # files in www/ are served from the app root, so the path omits the folder
376
+ column(width = 10, titlePanel("Dead Wildlife Observations from iNaturalist"))
377
+ ),
378
+ hr(),
379
+
380
+ sidebarLayout(
381
+ sidebarPanel(
382
+ tabsetPanel(id = "sidebar_tabs",
383
+
384
+ # == Query Panel ==
385
+ tabPanel("Query",
386
+ br(),
387
+ radioButtons("region_mode", "Region Input Mode:",
388
+ choices = c("Enter Numeric place_id" = "place",
389
+ "Two-Click Bounding Box" = "bbox"),
390
+ selected = "bbox"),
391
+
392
+ # place_id input
393
+ conditionalPanel(
394
+ condition = "input.region_mode == 'place'",
395
+ numericInput("place_id", "Numeric place_id (e.g. 1 = USA, 14 = California)",
396
+ value = 1, min = 1, max = 999999, step = 1)
397
+ ),
398
+
399
+ # Bounding-box selector
400
+ conditionalPanel(
401
+ condition = "input.region_mode == 'bbox'",
402
+ helpText("Left-click once for the SW corner, once more for the NE corner."),
403
+ leafletOutput("map_two_click", height = "300px"),
404
+ br(), actionButton("clear_bbox", "Clear bounding box"), br(), br(),
405
+ verbatimTextOutput("bbox_coords")
406
+ ),
407
+
408
+ # Date-range selector
409
+ dateRangeInput("date_range", "Select Date Range:",
410
+ start = Sys.Date() - 365,
411
+ end = Sys.Date(),
412
+ min = "2010-01-01",
413
+ max = Sys.Date()),
414
+
415
+ # Query by iconic class or species
416
+ radioButtons("query_type", "Query By:",
417
+ choices = c("Taxon Class" = "iconic",
418
+ "Exact Species Name" = "species")),
419
+ conditionalPanel(
420
+ condition = "input.query_type == 'iconic'",
421
+ selectInput("iconic_taxon", "Select Taxon Class:",
422
+ choices = c("Aves", "Mammalia", "Reptilia", "Amphibia",
423
+ "Actinopterygii", "Mollusca", "Animalia"),
424
+ selected = "Aves")
425
+ ),
426
+ conditionalPanel(
427
+ condition = "input.query_type == 'species'",
428
+ textInput("species_name", "Enter exact species name (e.g. Puma concolor)", "")
429
+ ),
430
+
431
+ actionButton("run_query", "Run Query", icon = icon("play")),
432
+ hr(),
433
+ downloadButton("downloadAll", "Download ALL Data CSV", icon = icon("download"))
434
+ ),
435
+
436
+ # == About Panel ==
437
+ tabPanel("About",
438
+ br(),
439
+ p("This Shiny application was created by Diego Ellis-Soto (UC Berkeley) with collaborators Liam U. Taylor (Bowdoin University), Lizzy Edson (California Academy of Sciences), Christopher J. Schell (UC Berkeley), Carl Boettiger (UC Berkeley), and Rebecca Johnson (California Academy of Sciences). It retrieves iNaturalist observations annotated as 'Dead' wildlife in real time for research and conservation analyses."),
440
+ p("Participatory science platforms like iNaturalist empower community members to document biodiversity. Observations of dead wildlife help detect mortality events, disease spread, and other threats that can be critical for conservation."),
441
+ p("Please contribute responsibly and ethically. Data on wildlife mortalities can inform management decisions and raise public awareness.")
442
+ ),
443
+
444
+ # == How to Use Panel ==
445
+ tabPanel("How to Use",
446
+ br(),
447
+ p("1. Choose a region: enter a numeric place_id **or** define a custom bounding box by clicking twice on the map."),
448
+ p("2. Pick a date range for your query. The app will fetch data week-by-week within that range."),
449
+ p("3. Select either an iconic taxon class (e.g. Aves) **or** an exact species name."),
450
+ p("4. Click **Run Query**. Visualisations and the full data table will update. You can download the complete dataset using the 'Download ALL Data CSV' button.")
451
+ )
452
+ )
453
+ ),
454
+
455
+ mainPanel(
456
+ tabsetPanel(
457
+ tabPanel("Daily Time Series", withSpinner(plotOutput("dailyPlot"), type = 6)),
458
+ tabPanel("Top Species", withSpinner(plotOutput("speciesPlot"), type = 6)),
459
+ tabPanel("Hexbin Map (All Data)", withSpinner(plotOutput("hotspotMap"), type = 6)),
460
+ tabPanel("All Data Table", withSpinner(DT::dataTableOutput("dataTable"), type = 6))
461
+ )
462
+ )
463
+ )
464
+ )
465
+
466
+ ##################################################################
467
+ # Server
468
+ ##################################################################
469
+
470
+ server <- function(input, output, session) {
471
+
472
+ # Reactive values for bounding box clicks
473
+ rv <- reactiveValues(corner1 = NULL, corner2 = NULL, bbox = NULL)
474
+
475
+ # Initialise leaflet map
476
+ output$map_two_click <- renderLeaflet({
477
+ leaflet() %>% addTiles() %>% setView(lng = -100, lat = 40, zoom = 4)
478
+ })
479
+
480
+ # Handle map clicks
481
+ observeEvent(input$map_two_click_click, {
482
+ req(input$region_mode == "bbox")
483
+ click <- input$map_two_click_click
484
+ if (is.null(click)) return()
485
+
486
+ lat_clicked <- click$lat; lng_clicked <- click$lng
487
+ if (is.null(rv$corner1)) {
488
+ rv$corner1 <- c(lat_clicked, lng_clicked)
489
+ showNotification("First corner set. Now click for the opposite corner.")
490
+ leafletProxy("map_two_click") %>% clearMarkers() %>%
491
+ addMarkers(lng = lng_clicked, lat = lat_clicked, popup = "Corner 1")
492
+ rv$corner2 <- NULL; rv$bbox <- NULL
493
+ } else {
494
+ rv$corner2 <- c(lat_clicked, lng_clicked)
495
+ lat_min <- min(rv$corner1[1], rv$corner2[1]); lat_max <- max(rv$corner1[1], rv$corner2[1])
496
+ lng_min <- min(rv$corner1[2], rv$corner2[2]); lng_max <- max(rv$corner1[2], rv$corner2[2])
497
+ rv$bbox <- c(lat_min, lng_min, lat_max, lng_max)
498
+
499
+ showNotification("Second corner set. Bounding box defined!", duration = 2)
500
+
501
+ leafletProxy("map_two_click") %>% clearMarkers() %>%
502
+ addMarkers(lng = rv$corner1[2], lat = rv$corner1[1], popup = "Corner 1") %>%
503
+ addMarkers(lng = rv$corner2[2], lat = rv$corner2[1], popup = "Corner 2") %>%
504
+ clearShapes() %>%
505
+ addRectangles(lng1 = lng_min, lat1 = lat_min, lng2 = lng_max, lat2 = lat_max,
506
+ fillColor = "red", fillOpacity = 0.2, color = "red")
507
+ }
508
+ })
509
+
510
+ observeEvent(input$clear_bbox, {
511
+ rv$corner1 <- rv$corner2 <- rv$bbox <- NULL
512
+ leafletProxy("map_two_click") %>% clearMarkers() %>% clearShapes()
513
+ })
514
+
515
+ output$bbox_coords <- renderText({
516
+ req(input$region_mode == "bbox")
517
+ if (is.null(rv$bbox)) "No bounding box defined yet." else paste0(
518
+ "Bounding box:\nSW: (", rv$bbox[1], ", ", rv$bbox[2], ")\nNE: (", rv$bbox[3], ", ", rv$bbox[4], ")")
519
+ })
520
+
521
+ # Store query results
522
+ result_data <- reactiveVal(NULL)
523
+
524
+ # Run query
525
+ observeEvent(input$run_query, {
526
+ req(input$date_range)
527
+ start_date <- as.Date(input$date_range[1])
528
+ end_date <- as.Date(input$date_range[2])
529
+
530
+ # Region params
531
+ place_id_val <- NULL; swlat_val <- NULL; swlng_val <- NULL; nelat_val <- NULL; nelng_val <- NULL
532
+ if (input$region_mode == "place") {
533
+ place_id_val <- input$place_id
534
+ } else {
535
+ validate(need(!is.null(rv$bbox), "Please define a bounding box by clicking twice on the map."))
536
+ swlat_val <- rv$bbox[1]; swlng_val <- rv$bbox[2]; nelat_val <- rv$bbox[3]; nelng_val <- rv$bbox[4]
537
+ }
538
+
539
+ # Taxon params
540
+ iconic_val <- if (input$query_type == "iconic") input$iconic_taxon else NULL
541
+ species_val <- if (input$query_type == "species") input$species_name else NULL
542
+
543
+ withProgress(message = 'Fetching data from iNaturalist…', value = 0, {
544
+ query_res <- getDeadVertebrates_dateRange(
545
+ start_date = start_date,
546
+ end_date = end_date,
547
+ place_id = place_id_val,
548
+ swlat = swlat_val,
549
+ swlng = swlng_val,
550
+ nelat = nelat_val,
551
+ nelng = nelng_val,
552
+ iconic_taxa = iconic_val,
553
+ taxon_name = species_val
554
+ )
555
+ result_data(query_res)
556
+ })
557
+ })
558
+
559
+ # Render plots
560
+ output$dailyPlot <- renderPlot({ req(result_data()); result_data()$daily_plot })
561
+ output$speciesPlot <- renderPlot({ req(result_data()); result_data()$top_species_plot })
562
+ output$hotspotMap <- renderPlot({ req(result_data()); result_data()$map_hotspots_gg })
563
+
564
+ # Data table (all data)
565
+ output$dataTable <- DT::renderDataTable({
566
+ req(result_data())
567
+ df <- result_data()$merged_df_all
568
+ if (nrow(df) == 0) {
569
+ return(DT::datatable(data.frame(Message = "No records found"), options = list(pageLength = 20)))
570
+ }
571
+
572
+ df <- df %>% mutate(inat_link = paste0("<a href='https://www.inaturalist.org/observations/", id, "' target='_blank'>", id, "</a>"))
573
+ photo_col <- "taxon.default_photo.square_url"
574
+ if (photo_col %in% names(df)) {
575
+ df$image_thumb <- ifelse(!is.na(df[[photo_col]]) & df[[photo_col]] != "",
576
+ paste0("<img src='", df[[photo_col]], "' width='50'/>"), "No Img")
577
+ } else {
578
+ df$image_thumb <- "No Img"
579
+ }
580
+
581
+ show_cols <- c("inat_link", "image_thumb", "taxon.name", "created_at_details.date",
582
+ setdiff(names(df), c("inat_link", "image_thumb", "taxon.name", "created_at_details.date")))
583
+
584
+ DT::datatable(df[, show_cols, drop = FALSE], escape = FALSE,
585
+ options = list(pageLength = 20, autoWidth = TRUE))
586
+ })
587
+
588
+ # Download handler – ALL data only
589
+ output$downloadAll <- downloadHandler(
590
+ filename = function() paste0("inat_dead_ALL_", Sys.Date(), ".csv"),
591
+ content = function(file) { req(result_data()); readr::write_csv(result_data()$merged_df_all, file) }
592
+ )
593
+ }
594
+
595
+ shinyApp(ui = ui, server = server)
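+ # To launch locally (illustrative): save this file as app.R in a folder with a www/ directory
+ # containing all_logos.png, then run shiny::runApp() from that folder.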
poc/app_original_backup.R ADDED
@@ -0,0 +1,773 @@
1
+ ##################################################################
2
+ # Single R Script: Mortality Analysis + Shiny - Two-Click BBox
3
+ # Using Weekly Queries (No Hard-Coded Years)
4
+ ##################################################################
5
+
6
+ # See https://www.inaturalist.org/pages/api+recommended+practices
7
+ # Query Rate
8
+ # Please keep requests to about 1 per second, and around 10k API requests a day
9
+ # The API is meant to be used for building applications and for fetching small to medium batches of data. It is not meant to be a way to download data in bulk
10
+ # Requests exceeding this limit might be throttled, and will return an HTTP 429 exception “Too Many Requests”
11
+ # Please add delays into your code to keep under these limits, and especially if you’re getting 429 errors
12
+ # We may block IPs that consistently exceed these limits
13
+ # Please use a single IP address for fetching data. If we think multiple IPs are being used in coordination to bypass rate limits, we may block those IPs regardless of query rate
14
+ # Downloading over 5 GB of media per hour or 24 GB of media per day may result in a permanent block
15
+ # If writing software to interact with the API, please consider using a custom User Agent to identify your application, or iNaturalist username, or something we might use to differentiate your requests
16
+ # The User Agent can be set with an HTTP header, e.g. User-Agent: [application or user name]
17
+ #
18
+
19
+ ### 1) Install/Load Required Packages ####
20
+ required_packages <- c(
21
+ "httr", "jsonlite", "tidyverse", "glue", "lubridate",
22
+ "wesanderson", "viridis", "shinycssloaders",
23
+ "DT", "maps", "mapdata", "leaflet", "leaflet.extras",
24
+ "shinythemes", "shiny"
25
+ )
26
+
27
+ installed_packages <- rownames(installed.packages())
28
+ for (pkg in required_packages) {
29
+ if (!pkg %in% installed_packages) {
30
+ install.packages(pkg, dependencies = TRUE)
31
+ }
32
+ }
33
+
34
+ library(httr)
35
+ library(jsonlite)
36
+ library(tidyverse)
37
+ library(glue)
38
+ library(lubridate)
39
+ library(wesanderson)
40
+ library(viridis)
41
+ library(shinycssloaders)
42
+ library(DT)
43
+ library(maps)
44
+ library(mapdata)
45
+ library(leaflet)
46
+ library(leaflet.extras)
47
+ library(shinythemes)
48
+ library(shiny)
49
+
50
+ ##################################################################
51
+ # 2) Mortality-Analysis Functions
52
+ ##################################################################
53
+
54
+ # -- Base function to fetch dead observations over a specified date range --
55
+ fetch_dead_data_once <- function(
56
+ place_id = NULL,
57
+ swlat = NULL,
58
+ swlng = NULL,
59
+ nelat = NULL,
60
+ nelng = NULL,
61
+ start_date,
62
+ end_date,
63
+ iconic_taxa = NULL,
64
+ taxon_name = NULL,
65
+ conservation_status = NULL,
66
+ per_page = 200,
67
+ max_pages = 200
68
+ ) {
69
+ base_url <- "https://api.inaturalist.org/v1/observations"
70
+
71
+ q_parts <- list(
72
+ "term_id=17", # 'Dead' annotation
73
+ "term_value_id=19", # 'Dead' annotation
74
+ "verifiable=true", # only verifiable
75
+ glue("d1={start_date}"), # start date
76
+ glue("d2={end_date}"), # end date
77
+ "order=desc",
78
+ "order_by=created_at",
79
+ glue("per_page={per_page}")
80
+ )
81
+
82
+ if (!is.null(iconic_taxa) && iconic_taxa != "") {
83
+ q_parts <- c(q_parts, glue("iconic_taxa={iconic_taxa}"))
84
+ }
85
+
86
+ if (!is.null(taxon_name) && taxon_name != "") {
87
+ q_parts <- c(q_parts, glue("taxon_name={URLencode(taxon_name)}"))
88
+ }
89
+
90
+ if (!is.null(conservation_status) && conservation_status != "") {
91
+ # If you'd like to filter by iNat conservation status,
92
+ # or e.g. pass additional parameters to the API
93
+ if (!grepl("=", conservation_status, fixed = TRUE)) {
94
+ q_parts <- c(q_parts, glue("cs={URLencode(conservation_status)}"))
95
+ } else {
96
+ q_parts <- c(q_parts, conservation_status)
97
+ }
98
+ }
99
+
100
+ query_params <- paste(q_parts, collapse = "&")
101
+
102
+ # Build location portion of query
103
+ loc_part <- ""
104
+ if (!is.null(place_id)) {
105
+ loc_part <- glue("&place_id={place_id}")
106
+ } else if (!is.null(swlat) && !is.null(swlng) &&
107
+ !is.null(nelat) && !is.null(nelng)) {
108
+ loc_part <- glue("&nelat={nelat}&nelng={nelng}&swlat={swlat}&swlng={swlng}")
109
+ } else {
110
+ stop("Must provide either 'place_id' OR bounding box (swlat, swlng, nelat, nelng).")
111
+ }
112
+
113
+ observations_list <- list()
114
+ current_page <- 1
115
+
116
+ while (current_page <= max_pages) {
117
+ query_url <- paste0(
118
+ base_url, "?", query_params, "&page=", current_page, loc_part
119
+ )
120
+
121
+ message("Fetching page ", current_page,
122
+ " [", start_date, " to ", end_date, "]:\n", query_url)
123
+
124
+ resp <- GET(query_url)
125
+ if (http_error(resp)) {
126
+ warning("HTTP error on page ", current_page, ": ", status_code(resp))
127
+ break
128
+ }
129
+
130
+ parsed <- content(resp, as = "text", encoding = "UTF-8") %>%
131
+ fromJSON(flatten = TRUE)
132
+
133
+ if (length(parsed$results) == 0) {
134
+ message("No more results at page ", current_page)
135
+ break
136
+ }
137
+
138
+ obs_page_df <- as_tibble(parsed$results)
139
+ observations_list[[current_page]] <- obs_page_df
140
+
141
+ # If the returned page is smaller than per_page, we've reached the last page
142
+ if (nrow(obs_page_df) < per_page) {
143
+ message("Reached last page of results at page ", current_page)
144
+ break
145
+ }
146
+
147
+ current_page <- current_page + 1
148
+ Sys.sleep(1.5) # Polite pause
149
+ }
150
+
151
+ observations_all <- bind_rows(observations_list)
152
+ return(observations_all)
153
+ }
154
+
155
+ # -- Function to fetch data by iterating through each WEEK of a given year --
156
+ fetch_dead_data_weekly <- function(
157
+ year,
158
+ place_id = NULL,
159
+ swlat = NULL,
160
+ swlng = NULL,
161
+ nelat = NULL,
162
+ nelng = NULL,
163
+ iconic_taxa = NULL,
164
+ taxon_name = NULL,
165
+ conservation_status = NULL,
166
+ per_page = 200,
167
+ max_pages = 200
168
+ ) {
169
+ start_of_year <- as.Date(glue("{year}-01-01"))
170
+ end_of_year <- as.Date(glue("{year}-12-31"))
171
+
172
+ # Create a sequence of "week starts" from Jan 1 to Dec 31
173
+ week_starts <- seq.Date(start_of_year, end_of_year, by = "1 week")
174
+
175
+ weekly_list <- list()
176
+
177
+ for (i in seq_along(week_starts)) {
178
+ start_date <- week_starts[i]
179
+ # If not the last index, end_date = next start - 1 day, else clamp to year-end
180
+ if (i < length(week_starts)) {
181
+ end_date <- week_starts[i + 1] - 1
182
+ } else {
183
+ end_date <- end_of_year
184
+ }
185
+
186
+ message("\n--- Querying ", year, ", Week #", i,
187
+ " [", start_date, " to ", end_date, "] ---")
188
+
189
+ df_week <- fetch_dead_data_once(
190
+ place_id = place_id,
191
+ swlat = swlat,
192
+ swlng = swlng,
193
+ nelat = nelat,
194
+ nelng = nelng,
195
+ start_date = start_date,
196
+ end_date = end_date,
197
+ iconic_taxa = iconic_taxa,
198
+ taxon_name = taxon_name,
199
+ conservation_status = conservation_status,
200
+ per_page = per_page,
201
+ max_pages = max_pages
202
+ )
203
+ weekly_list[[i]] <- df_week
204
+
205
+ Sys.sleep(1.5)
206
+ }
207
+
208
+ year_df <- bind_rows(weekly_list)
209
+ return(year_df)
210
+ }
211
+
212
+ # -- Wrapper that iterates over multiple years, pulling data weekly for each year --
213
+ getDeadVertebrates_weeklyLoop <- function(
214
+ years, # <--- No default: pass your own vector of years
215
+ place_id = NULL,
216
+ swlat = NULL,
217
+ swlng = NULL,
218
+ nelat = NULL,
219
+ nelng = NULL,
220
+ iconic_taxa = NULL,
221
+ taxon_name = NULL,
222
+ conservation_status = NULL,
223
+ per_page = 500,
224
+ max_pages = 500,
225
+ outdir = NULL
226
+ ) {
227
+ all_years_list <- list()
228
+
229
+ # For each year, run weekly fetch
230
+ for (yr in years) {
231
+ message("\n========= YEAR: ", yr, " ==========\n")
232
+ yr_df <- fetch_dead_data_weekly(
233
+ year = yr,
234
+ place_id = place_id,
235
+ swlat = swlat,
236
+ swlng = swlng,
237
+ nelat = nelat,
238
+ nelng = nelng,
239
+ iconic_taxa= iconic_taxa,
240
+ taxon_name = taxon_name,
241
+ conservation_status = conservation_status,
242
+ per_page = per_page,
243
+ max_pages = max_pages
244
+ ) %>%
245
+ mutate(Window = as.character(yr))
246
+
247
+ all_years_list[[as.character(yr)]] <- yr_df
248
+ }
249
+
250
+ merged_df_all <- bind_rows(all_years_list)
251
+
252
+ # If no data found or missing crucial columns, create empty placeholders
253
+ if (!"created_at_details.date" %in% names(merged_df_all) ||
254
+ nrow(merged_df_all) == 0) {
255
+ daily_plot <- ggplot() +
256
+ labs(title = "No 'Dead' Observations Found", x = NULL, y = NULL) +
257
+ theme_void()
258
+
259
+ top_species_plot <- ggplot() +
260
+ labs(title = "No species data", x = NULL, y = NULL) +
261
+ theme_void()
262
+
263
+ map_hotspots_gg <- ggplot() +
264
+ labs(title = "No data for hotspots map") +
265
+ theme_void()
266
+
267
+ return(list(
268
+ merged_df_all = merged_df_all,
269
+ merged_df = merged_df_all,
270
+ daily_plot = daily_plot,
271
+ top_species_plot = top_species_plot,
272
+ map_hotspots_gg = map_hotspots_gg,
273
+ daily_90th_quant = NA
274
+ ))
275
+ }
276
+
277
+ # Optionally write out to disk
278
+ if (!is.null(outdir)) {
279
+ if (!dir.exists(outdir)) {
280
+ dir.create(outdir, recursive = TRUE)
281
+ }
282
+ readr::write_csv(merged_df_all, file.path(outdir, "merged_df_ALL_data.csv"))
283
+ }
284
+
285
+ # Aggregate counts by day
286
+ counts_by_day <- merged_df_all %>%
287
+ mutate(obs_date = as.Date(`observed_on`)) %>%
288
+ group_by(Window, obs_date) %>%
289
+ summarise(n = n_distinct(id), .groups = "drop")
290
+
291
+ y_max_value <- max(counts_by_day$n, na.rm = TRUE)
292
+ n_windows <- length(unique(counts_by_day$Window))
293
+ wes_colors <- wes_palette("Zissou1", n_windows, type = "discrete")
294
+
295
+ # Daily line plot
296
+ daily_plot <- ggplot(counts_by_day, aes(x = obs_date, y = n, color = Window)) +
297
+ geom_line(size = 1.2) +
298
+ geom_point(size = 2) +
299
+ # scale_color_viridis_d() +
300
+ scale_x_date(date_labels = "%b", date_breaks = "1 month") +
301
+ scale_y_continuous(limits = c(0, y_max_value)) +
302
+ labs(
303
+ title = glue("Daily 'Dead' Observations (Years {paste(years, collapse=', ')})"),
304
+ x = "Month",
305
+ y = "Number of Observations",
306
+ color = "Year"
307
+ ) +
308
+ theme_minimal(base_size = 14) +
309
+ theme(axis.text.x = element_text(angle = 45, hjust = 1))
310
+
311
+ # Top species bar plot
312
+ if ("taxon.name" %in% names(merged_df_all)) {
313
+ species_counts <- merged_df_all %>%
314
+ filter(!is.na(taxon.name)) %>%
315
+ group_by(Window, taxon.name) %>%
316
+ summarise(dead_count = n(), .groups = "drop")
317
+
318
+ top_species_overall <- species_counts %>%
319
+ group_by(taxon.name) %>%
320
+ summarise(total_dead = sum(dead_count)) %>%
321
+ arrange(desc(total_dead)) %>%
322
+ slice_head(n = 20)
323
+
324
+ species_top20 <- species_counts %>%
325
+ filter(taxon.name %in% top_species_overall$taxon.name)
326
+
327
+ top_species_plot <- ggplot(species_top20, aes(
328
+ x = reorder(taxon.name, -dead_count),
329
+ y = dead_count,
330
+ fill= Window
331
+ )) +
332
+ geom_col(position = position_dodge(width = 0.7)) +
333
+ coord_flip() +
334
+ # scale_fill_manual(values = wes_colors) +
335
+ # scale_color_viridis_d() +
336
+ labs(
337
+ title = "Top 20 Species with 'Dead' Observations",
338
+ x = "Species",
339
+ y = "Number of Dead Observations",
340
+ fill = "Year"
341
+ ) +
342
+ theme_minimal(base_size = 14)
343
+ } else {
344
+ top_species_plot <- ggplot() +
345
+ labs(title = "No 'taxon.name' column found", x = NULL, y = NULL) +
346
+ theme_void()
347
+ }
348
+
349
+ # Identify "high mortality" days (>= 90th percentile)
350
+ daily_quantile <- quantile(counts_by_day$n, probs = 0.90, na.rm = TRUE)
351
+ high_mortality_days <- counts_by_day %>%
352
+ filter(n >= daily_quantile) %>%
353
+ pull(obs_date)
354
+
355
+ merged_high <- merged_df_all %>%
356
+ mutate(obs_date = as.Date(`observed_on`)) %>%
357
+ filter(obs_date %in% high_mortality_days)
358
+
359
+ # Map of top-90% mortality days
360
+ if ("location" %in% names(merged_high)) {
361
+ location_df <- merged_high %>%
362
+ filter(!is.na(location) & location != "") %>%
363
+ separate(location, into = c("lat_str", "lon_str"), sep = ",", remove = FALSE) %>%
364
+ mutate(
365
+ latitude = as.numeric(lat_str),
366
+ longitude = as.numeric(lon_str)
367
+ )
368
+
369
+ if (nrow(location_df) == 0) {
370
+ map_hotspots_gg <- ggplot() +
371
+ labs(title = "No data in top 90th percentile days with valid location") +
372
+ theme_void()
373
+ } else {
374
+ min_lon <- min(location_df$longitude, na.rm = TRUE)
375
+ max_lon <- max(location_df$longitude, na.rm = TRUE)
376
+ min_lat <- min(location_df$latitude, na.rm = TRUE)
377
+ max_lat <- max(location_df$latitude, na.rm = TRUE)
378
+
379
+ map_hotspots_gg <- ggplot(location_df, aes(x = longitude, y = latitude, color = Window)) +
380
+ borders("world", fill = "gray80", colour = "white") +
381
+ geom_point(alpha = 0.6, size = 2) +
382
+ # scale_color_viridis_d() +
383
+ coord_quickmap(
384
+ xlim = c(min_lon, max_lon),
385
+ ylim = c(min_lat, max_lat),
386
+ expand = TRUE
387
+ ) +
388
+ labs(
389
+ title = glue("Top 90th percentile mortality days ({paste(years, collapse=', ')})"),
390
+ x = "Longitude",
391
+ y = "Latitude",
392
+ color = "Year"
393
+ ) +
394
+ theme_minimal(base_size = 14)
395
+ }
396
+ } else {
397
+ map_hotspots_gg <- ggplot() +
398
+ labs(title = "No 'location' column for top 90% days map") +
399
+ theme_void()
400
+ }
401
+
402
+ # Optionally save outputs
403
+ if (!is.null(outdir)) {
404
+ readr::write_csv(merged_high, file.path(outdir, "merged_df_top90.csv"))
405
+ ggsave(file.path(outdir, "daily_plot.png"),
406
+ daily_plot, width = 8, height = 5, dpi = 300)
407
+ ggsave(file.path(outdir, "top_species_plot.png"),
408
+ top_species_plot, width = 7, height = 7, dpi = 300)
409
+ ggsave(file.path(outdir, "map_hotspots.png"),
410
+ map_hotspots_gg, width = 8, height = 5, dpi = 300)
411
+ }
412
+
413
+ return(list(
414
+ merged_df_all = merged_df_all,
415
+ merged_df = merged_high,
416
+ daily_plot = daily_plot,
417
+ top_species_plot = top_species_plot,
418
+ map_hotspots_gg = map_hotspots_gg,
419
+ daily_90th_quant = daily_quantile
420
+ ))
421
+ }
422
+
423
+ ##################################################################
424
+ # 3) Shiny App: UI + Server (Weekly Queries)
425
+ ##################################################################
426
+
427
+ ui <- fluidPage(
428
+ theme = shinytheme("cosmo"), # Use a professional theme from shinythemes
429
+
430
+ # -- Logo and Title at the top --
431
+ fluidRow(
432
+ column(
433
+ width = 2,
434
+ tags$img(src = "www/all_logos.png", height = "400px")
435
+ ),
436
+ column(
437
+ width = 10,
438
+ titlePanel("Dead Wildlife Observations from iNaturalist")
439
+ )
440
+ ),
441
+ hr(),
442
+
443
+ sidebarLayout(
444
+ sidebarPanel(
445
+ tabsetPanel(
446
+ id = "sidebar_tabs",
447
+
448
+ # == Query Panel ==
449
+ tabPanel(
450
+ title = "Query",
451
+ br(),
452
+ radioButtons("region_mode", "Region Input Mode:",
453
+ choices = c("Enter Numeric place_id" = "place",
454
+ "Two-Click Bounding Box" = "bbox"),
455
+ # choices = c(
456
+ # "Two-Click Bounding Box" = "bbox"),
457
+ selected = "bbox"),
458
+
459
+ # If user chooses numeric "place_id"
460
+ conditionalPanel(
461
+ condition = "input.region_mode == 'place'",
462
+ numericInput("place_id",
463
+ "Numeric place_id (e.g. 1 = USA, 6712 = Canada, 14 = California)",
464
+ value = 1, min = 1, max = 999999, step = 1)
465
+ ),
466
+
467
+ # If user chooses bounding box
468
+ conditionalPanel(
469
+ condition = "input.region_mode == 'bbox'",
470
+ helpText("Left-click once for the SW corner, once more for the NE corner."),
471
+ leafletOutput("map_two_click", height = "300px"),
472
+ br(),
473
+ actionButton("clear_bbox", "Clear bounding box"),
474
+ br(), br(),
475
+ verbatimTextOutput("bbox_coords")
476
+ ),
477
+
478
+ # Years
479
+ checkboxGroupInput("years", "Select Year(s):",
480
+ choices = 2018:2025,
481
+ selected = c(2022, 2023)),
482
+
483
+ # Query by iconic class or exact species
484
+ radioButtons("query_type", "Query By:",
485
+ choices = c("Taxon Class" = "iconic",
486
+ "Exact Species Name" = "species")),
487
+ conditionalPanel(
488
+ condition = "input.query_type == 'iconic'",
489
+ selectInput("iconic_taxon", "Select Taxon Class:",
490
+ choices = c("Aves", "Mammalia", "Reptilia", "Amphibia",
491
+ "Actinopterygii", "Mollusca", "Animalia"),
492
+ selected = "Aves")
493
+ ),
494
+ conditionalPanel(
495
+ condition = "input.query_type == 'species'",
496
+ textInput("species_name", "Enter exact species name (e.g. Puma concolor)", "")
497
+ ),
498
+
499
+ actionButton("run_query", "Run Query", icon = icon("play")),
500
+ hr(),
501
+ downloadButton("downloadTop90", "Download Top-90% CSV", icon = icon("download")),
502
+ br(), br(),
503
+ downloadButton("downloadAll", "Download ALL Data CSV", icon = icon("download"))
504
+ ),
505
+
506
+ # == About Panel ==
507
+ tabPanel(
508
+ title = "About",
509
+ br(),
510
+ p("This Shiny application was created by Diego Ellis Soto (University of California Berkeley).
511
+ It queries iNaturalist for observations that have been annotated as 'Dead' wildlife (term_id=17, term_value_id=19) in real time.
512
+ These data are updated daily. They are fetched via the iNaturalist API and summarized here for scientific and conservation purposes.")
513
+ ),
514
+ # == Participatory Science Panel ==
515
+ tabPanel(
516
+ title = "Participatory Science",
517
+ br(),
518
+ p("Digital platforms of participatory science like iNaturalist allow everyday people to collect and share data about local biodiversity.
519
+ Recording observations of dead wildlife can help us track mortality events, disease spread, and other factors affecting animal populations.
520
+ In fact, information on wildlife mortality is often even more critical for conservation efforts than records of living animals."),
521
+ p("We encourage everyone to contribute their sightings responsibly, ensuring that any data on roadkill or other mortalities can help management and conservation efforts, and
522
+ raise public awareness.")
523
+ ),
524
+
525
+ # == How To Use Panel ==
526
+ tabPanel(
527
+ title = "How to Use",
528
+ br(),
529
+ p("This application lets you retrieve data about dead wildlife observations from iNaturalist.
530
+ You can choose to manually provide a numeric place_id or define a custom bounding box by clicking twice on the map."),
531
+ p("You can also decide whether to query by taxon class (e.g. Aves) or by exact species name (e.g. Puma concolor)."),
532
+ # p("After selecting your inputs, press 'Run Query.' Two separate CSV downloads are provided: (1) for all data retrieved, and (2) for only the top-90% mortality days (for hotspot analysis).")
533
+ p("After selecting your inputs, press 'Run Query.' and a CSV for downloads is provided: (1) for all data retrieved.")
534
+ )
535
+ )
536
+ ),
537
+
538
+ mainPanel(
539
+ tabsetPanel(
540
+ tabPanel("Daily Time Series", withSpinner(plotOutput("dailyPlot"), type = 6)),
541
+ tabPanel("Top Species", withSpinner(plotOutput("speciesPlot"), type = 6)),
542
+ tabPanel("Hotspots Map (90th%)", withSpinner(plotOutput("hotspotMap"), type = 6)),
543
+ tabPanel("Data Table (Top-90%)", withSpinner(DT::dataTableOutput("dataTable"), type = 6))
544
+ )
545
+ )
546
+ )
547
+ )
548
+
549
+ server <- function(input, output, session) {
550
+
551
+ # Reactive values for bounding box corners
552
+ rv <- reactiveValues(
553
+ corner1 = NULL,
554
+ corner2 = NULL,
555
+ bbox = NULL
556
+ )
557
+
558
+ # Initialize map
559
+ output$map_two_click <- renderLeaflet({
560
+ leaflet() %>%
561
+ addTiles() %>%
562
+ setView(lng = -100, lat = 40, zoom = 4)
563
+ })
564
+
565
+ # Handle bounding box clicks
566
+ observeEvent(input$map_two_click_click, {
567
+ req(input$region_mode == "bbox")
568
+
569
+ click <- input$map_two_click_click
570
+ if (is.null(click)) return()
571
+
572
+ lat_clicked <- click$lat
573
+ lng_clicked <- click$lng
574
+
575
+ if (is.null(rv$corner1)) {
576
+ rv$corner1 <- c(lat_clicked, lng_clicked)
577
+ showNotification("First corner set. Now click for the opposite corner.")
578
+
579
+ leafletProxy("map_two_click") %>%
580
+ clearMarkers() %>%
581
+ addMarkers(lng = lng_clicked, lat = lat_clicked, popup = "Corner 1")
582
+
583
+ rv$corner2 <- NULL
584
+ rv$bbox <- NULL
585
+
586
+ } else {
587
+ rv$corner2 <- c(lat_clicked, lng_clicked)
588
+
589
+ lat_min <- min(rv$corner1[1], rv$corner2[1])
590
+ lat_max <- max(rv$corner1[1], rv$corner2[1])
591
+ lng_min <- min(rv$corner1[2], rv$corner2[2])
592
+ lng_max <- max(rv$corner1[2], rv$corner2[2])
593
+
594
+ rv$bbox <- c(lat_min, lng_min, lat_max, lng_max)
595
+
596
+ showNotification("Second corner set. Bounding box defined!", duration = 2)
597
+
598
+ leafletProxy("map_two_click") %>%
599
+ clearMarkers() %>%
600
+ addMarkers(lng = rv$corner1[2], lat = rv$corner1[1], popup = "Corner 1") %>%
601
+ addMarkers(lng = rv$corner2[2], lat = rv$corner2[1], popup = "Corner 2") %>%
602
+ clearShapes() %>%
603
+ addRectangles(
604
+ lng1 = lng_min, lat1 = lat_min,
605
+ lng2 = lng_max, lat2 = lat_max,
606
+ fillColor = "red", fillOpacity = 0.2,
607
+ color = "red"
608
+ )
609
+ }
610
+ })
611
+
612
+ observeEvent(input$clear_bbox, {
613
+ rv$corner1 <- NULL
614
+ rv$corner2 <- NULL
615
+ rv$bbox <- NULL
616
+
617
+ leafletProxy("map_two_click") %>%
618
+ clearMarkers() %>%
619
+ clearShapes()
620
+ })
621
+
622
+ output$bbox_coords <- renderText({
623
+ req(input$region_mode == "bbox")
624
+
625
+ if (is.null(rv$bbox)) {
626
+ "No bounding box defined yet."
627
+ } else {
628
+ paste0(
629
+ "Bounding box:\n",
630
+ "SW corner: (", rv$bbox[1], ", ", rv$bbox[2], ")\n",
631
+ "NE corner: (", rv$bbox[3], ", ", rv$bbox[4], ")"
632
+ )
633
+ }
634
+ })
635
+
636
+ # Store final query results
637
+ result_data <- reactiveVal(NULL)
638
+
639
+ # Main "Run Query" button
640
+ observeEvent(input$run_query, {
641
+ req(input$years)
642
+ shiny::validate(need(length(input$years) > 0, "Please select at least one year."))
643
+
644
+ yrs <- as.numeric(input$years)
645
+
646
+ # Region logic
647
+ place_id_val <- NULL
648
+ swlat_val <- NULL
649
+ swlng_val <- NULL
650
+ nelat_val <- NULL
651
+ nelng_val <- NULL
652
+
653
+ if (input$region_mode == "place") {
654
+ place_id_val <- input$place_id
655
+ } else {
656
+ shiny::validate(need(!is.null(rv$bbox), "Please click twice on the map to define bounding box."))
657
+ swlat_val <- rv$bbox[1]
658
+ swlng_val <- rv$bbox[2]
659
+ nelat_val <- rv$bbox[3]
660
+ nelng_val <- rv$bbox[4]
661
+ }
662
+
663
+ # Query type logic
664
+ iconic_val <- NULL
665
+ species_val <- NULL
666
+ if (input$query_type == "iconic") {
667
+ iconic_val <- input$iconic_taxon
668
+ } else {
669
+ species_val <- input$species_name
670
+ }
671
+
672
+ # Fetch data
673
+ withProgress(message = 'Fetching data from iNaturalist (Weekly)...', value = 0, {
674
+ incProgress(0.4)
675
+
676
+ query_res <- getDeadVertebrates_weeklyLoop(
677
+ years = yrs,
678
+ place_id = place_id_val,
679
+ swlat = swlat_val,
680
+ swlng = swlng_val,
681
+ nelat = nelat_val,
682
+ nelng = nelng_val,
683
+ iconic_taxa = iconic_val,
684
+ taxon_name = species_val
685
+ )
686
+
687
+ result_data(query_res)
688
+ incProgress(1)
689
+ })
690
+ })
691
+
692
+ # Output plots
693
+ output$dailyPlot <- renderPlot({
694
+ req(result_data())
695
+ result_data()$daily_plot
696
+ })
697
+
698
+ output$speciesPlot <- renderPlot({
699
+ req(result_data())
700
+ result_data()$top_species_plot
701
+ })
702
+
703
+ output$hotspotMap <- renderPlot({
704
+ req(result_data())
705
+ result_data()$map_hotspots_gg
706
+ })
707
+
708
+ # Output data table (top-90% subset)
709
+ output$dataTable <- DT::renderDataTable({
710
+ req(result_data())
711
+ df <- result_data()$merged_df # top 90% subset
712
+
713
+ if (nrow(df) == 0) {
714
+ return(DT::datatable(
715
+ data.frame(Message = "No records found"),
716
+ options = list(pageLength = 20) # Show 20 records
717
+ ))
718
+ }
719
+
720
+ df <- df %>%
721
+ mutate(
722
+ inat_link = paste0(
723
+ "<a href='https://www.inaturalist.org/observations/",
724
+ id, "' target='_blank'>", id, "</a>"
725
+ )
726
+ )
727
+
728
+ photo_col <- "taxon.default_photo.square_url"
729
+ if (photo_col %in% names(df)) {
730
+ df$image_thumb <- ifelse(
731
+ !is.na(df[[photo_col]]) & df[[photo_col]] != "",
732
+ paste0("<img src='", df[[photo_col]], "' width='50'/>"),
733
+ "No Img"
734
+ )
735
+ } else {
736
+ df$image_thumb <- "No Img"
737
+ }
738
+
739
+ show_cols <- c(
740
+ "inat_link", "image_thumb", "taxon.name", "created_at_details.date",
741
+ setdiff(names(df), c("inat_link", "image_thumb", "taxon.name", "created_at_details.date"))
742
+ )
743
+
744
+ DT::datatable(
745
+ df[, show_cols, drop = FALSE],
746
+ escape = FALSE,
747
+ options = list(pageLength = 20, autoWidth = TRUE) # previously showed 50 rows per page
748
+ )
749
+ })
750
+
751
+ # Download handlers
752
+ output$downloadTop90 <- downloadHandler(
753
+ filename = function() {
754
+ paste0("inat_dead_top90_", Sys.Date(), ".csv")
755
+ },
756
+ content = function(file) {
757
+ req(result_data())
758
+ readr::write_csv(result_data()$merged_df, file)
759
+ }
760
+ )
761
+
762
+ output$downloadAll <- downloadHandler(
763
+ filename = function() {
764
+ paste0("inat_dead_ALL_", Sys.Date(), ".csv")
765
+ },
766
+ content = function(file) {
767
+ req(result_data())
768
+ readr::write_csv(result_data()$merged_df_all, file)
769
+ }
770
+ )
771
+ }
772
+
773
+ shinyApp(ui = ui, server = server)