Lobster Ecology Region of Interest Timeseries Processing

Processing Regional Timeseries of Surface and Bottom Temperatures

Published

October 20, 2025

About: Processing Timeseries of Surface/Bottom Temperature+Salinity

For this project we need to use FVCOM for point-value interpolations (what was the temperature at this location, at this time), and for regional summaries (what was the average temperature over time for this area).
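
Point-value interpolation is handled outside this document, but for orientation, here is a minimal sketch of the simplest version of that idea: find the mesh triangle containing a point, then average its three vertex nodes at the time of interest. This is only a sketch; it assumes the gom3_mesh object and fvcom_yrx NetCDF connection created later in this document, and the point coordinates are made up.

Code
# Hypothetical point of interest (coordinates are placeholders)
pt <- st_sfc(st_point(c(-68.5, 43.5)), crs = st_crs(gom3_mesh))

# Which mesh triangle contains the point?
# (the mesh sf carries the elem/p1/p2/p3 columns used throughout this doc)
tri <- gom3_mesh[st_intersects(pt, gom3_mesh)[[1]][1], ]

# Average that triangle's three vertex nodes at one time step (day 1)
node_vals <- sapply(c(tri$p1, tri$p2, tri$p3), function(node_i){
  ncvar_get(fvcom_yrx, "surface_t", start = c(node_i, 1), count = c(1, 1))})
mean(node_vals)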

This document steps through the approach to getting regional summaries for our areas of interest. The areas of interest for this project are a combination of nearshore areas (intersections of a 12 nm buffer from shore with lobster management strata) and offshore regions (the Gulf of Maine + Georges Bank and Southern New England EPUs).

Code
####  Packages  ####
library(gmRi)
library(tidyverse)
library(sf)
library(rnaturalearth)
library(fvcom)
library(ncdf4)

# Set the theme
theme_set(theme_bw() + map_theme())

# Project paths
lob_ecol_path <- cs_path("mills", "Projects/Lobster ECOL")
fvcom_path <- cs_path("res", "FVCOM/Lobster-ECOL")
poly_paths <- cs_path("mills", "Projects/Lobster ECOL/Spatial_Defs")

# Shapefiles
new_england <- ne_states("united states of america", returnclass = "sf") %>% 
  filter(postal %in% c("VT", "ME", "RI", "MA", "CT", "NH", "NY", "MD", "VA", "NJ", "DE", "NC", "PA", "WV"))
canada <- ne_states("canada", returnclass = "sf")
Code
# Use GMRI style
use_gmri_style_rmd()

Load Regional Shapefiles

Regions of interest capture temperature dynamics at two spatial scales:

1. A nearshore scale within 12 nm of the coast, further sub-divided by NMFS statistical areas.
2. An offshore scale extending beyond 12 nm to the shelf break, split into Southern New England and a northern Gulf of Maine + Georges Bank region.

Code
# clusters of statistical areas that align loosely with geography and management areas
inshore_areas <- read_sf(str_c(poly_paths,"spatial_defs_2025/12nm_poly_statarea_merge.shp")) %>% 
  janitor::clean_names() %>% 
  mutate(
    area_type = "nearshore-coastal",
    area_id = tolower(short_name))

# ecological production units
offshore_areas <- read_sf(str_c(poly_paths,"spatial_defs_2025/sne_gom_tocoast.shp"))  %>% 
  janitor::clean_names() %>% 
  mutate(
    area_type = "offshore-regional",
    area_id = tolower(region))

# Combine them
all_areas <- bind_rows(
  st_transform(dplyr::select(inshore_areas, area_id, geometry), st_crs(offshore_areas)), 
  dplyr::select(offshore_areas, area_id, geometry)
)


# Map everything
ggplot() +
  geom_sf(data = all_areas, aes(fill = area_id), alpha = 0.4) +
  geom_sf(data = new_england) +
  geom_sf(data = canada) +
  theme(legend.position = "right") +
  scale_fill_gmri() +
  coord_sf(xlim = c(-78, -66), ylim = c(35.5, 45)) +
  theme_bw() + map_theme() +
  labs(fill = "Area")

Code
# # Load inshore
# inshore_areas <- map(
#   setNames(
#     list.files(str_c(poly_paths, "inshore_areas"), full.names = T),
#     str_remove(list.files(str_c(poly_paths, "inshore_areas")), ".geojson")),
#   function(x){read_sf(x)})
# 
# # Load offshore
# offshore_areas <- map(
#   setNames(
#     list.files(str_c(poly_paths, "offshore_areas"), full.names = T),
#     str_remove(list.files(str_c(poly_paths, "offshore_areas")), ".geojson")),
#   function(x){read_sf(x)})
# 
# 
# # Map everything
# ggplot() +
#   geom_sf(data = bind_rows(inshore_areas), aes(fill = area_id), alpha = 0.4) +
#   geom_sf(data = bind_rows(offshore_areas), aes(fill = Region), alpha = 0.4) +
#   geom_sf(data = new_england) +
#   geom_sf(data = canada) +
#   theme(legend.position = "right") +
#   scale_fill_gmri() +
#   coord_sf(xlim = c(-78, -66), ylim = c(35.5, 45)) +
#   theme_bw() + map_theme() +
#   labs(fill = "Area")

Open Daily Data

Daily surface and bottom temperatures were obtained through direct communication with Dr. Chen’s group at UMass Dartmouth, with special thanks to Drs. Wang & Li for their help and continued correspondence.

From these daily-averaged NetCDF files, and using the fvcom R package, we can load the GOM3 triangular mesh into R as a simple features dataframe.

Code
# Here are the files we have, loop over them later
fvcom_surfbot_files <- setNames(
  list.files(fvcom_path, full.names = T, pattern = ".nc"),
  str_remove(list.files(fvcom_path, full.names = F, pattern = ".nc"), ".nc"))


# Test File: GOM3 1978
# Load some daily FVCOM that we downloaded and averaged
fvcom_yrx <- nc_open(fvcom_surfbot_files["gom3_1978"])



# Get the mesh itself as a simple feature collection
gom3_mesh <- get_mesh_geometry(fvcom_yrx, what = 'lonlat')


# Map everything
ggplot() +
  geom_sf(data = gom3_mesh, alpha = 0.4, linewidth = 0.1) +
  geom_sf(data = all_areas, aes(fill = area_id), alpha = 0.4) +
  geom_sf(data = new_england) +
  geom_sf(data = canada) +
  theme(legend.position = "right") +
  scale_fill_gmri() +
  coord_sf(xlim = c(-78, -65), ylim = c(35.5, 45)) +
  theme_bw() + map_theme() +
  labs(
    title = "Coverage overlap of FVCOM and study regions",
    fill = "Area")

Overlay Regions

For each region we need to perform an intersection with the FVCOM mesh. From that geoprocessing operation we can determine the node and element IDs to pull out of the NetCDF files.

At this step we can also calculate the relative areas of the resulting (clipped) triangles, which become the weights for area-weighted regional summaries.
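
As a quick illustration of how those relative areas get used (the full weighting code comes later in this document), the regional value is just an area-weighted mean of the per-triangle values. A toy sketch with made-up numbers:

Code
# Toy example: three clipped triangles with made-up values and areas (m^2)
tri_vals  <- c(7.2, 7.8, 6.9)        # per-triangle temperatures
tri_areas <- c(1.2e6, 0.4e6, 2.5e6)  # relative areas from st_intersection

# Area-weighted regional mean: sum(value * area) / sum(area)
sum(tri_vals * tri_areas) / sum(tri_areas)

# Equivalent built-in
weighted.mean(tri_vals, w = tri_areas)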

Code
# Nodes can be acquired this way
node_ids <- fvcom_nodes(fvcom_yrx) %>% 
  rename(node_id = node) %>% 
  st_as_sf(coords = c("lon", "lat"), crs = 4326)

# Center element ids can be gained this way
elem_ids <- fvcom_elems(fvcom_yrx) %>% 
  st_as_sf(coords = c("lon", "lat"), crs = 4326)

# # We want both the ids to join, but we also want to crop it...
# inshore_trans <- map(inshore_areas, ~st_transform(.x, crs = 6318))
# offshore_trans <- map(offshore_areas, ~st_transform(.x, crs = 6318))
all_areas_trans <- all_areas %>% 
  split(.$area_id) %>% 
  map(~st_transform(.x, crs = 6318))

# Walk through the intersection for a single area (Eastern Maine)
# before mapping over every area below
crop_t <- st_intersection(gom3_mesh, all_areas_trans$`eastern maine`) 


# Map what is happening:
ggplot() +
  geom_sf(
    data = all_areas_trans$`eastern maine`, 
    aes(color = "Eastern Maine"), linewidth = 1,
    fill = "transparent") +
  geom_sf(
    data = crop_t, 
    aes(color = "FVCOM Mesh"), 
    alpha = 0.5) +
  geom_sf(data = new_england) +
  scale_color_gmri() +
  coord_sf(xlim = c(-68.2, -67), ylim = c(44.2, 45.4)) +
  labs(
    title = "Assigning node/elements and area-weights to regions",
    subtitle = "st_intersection(mesh, area_poly)",
       color = "Geometry Source")

Perform Intersections on All Polygons

For each area of interest we need to identify the FVCOM node/element indices that fall within it. This only needs to be done once; the same indexing applies at every timestep.
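
Because that lookup is static, a later session (or another language) can skip the geoprocessing entirely and go straight from the saved table to NetCDF indices. A minimal sketch, assuming the CSV written in the next chunk and the open fvcom_yrx connection:

Code
# Reuse the saved lookup table without redoing st_intersection
lookup <- read_csv(here::here("local_data/new_areas_mesh_weights.csv"))

# Full daily surface temperature series at the first vertex node of the first triangle
node_i <- lookup$p1[1]
surface_ts <- ncvar_get(fvcom_yrx, "surface_t", start = c(node_i, 1), count = c(1, -1))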

Code
# Run them all and save tables in case we need to operate in python
area_intersections <- map_dfr(all_areas_trans, function(x){
  
  # Run intersection
  mesh_clip <- st_intersection(gom3_mesh, x) 
  
  # Get areas
  # Pull necessary information out
  mesh_clip_df <- mesh_clip %>% 
    mutate(rel_area = st_area(mesh_clip)) %>% 
    select(area_id, elem, p1, p2, p3, rel_area) %>% 
    st_drop_geometry()
  
  # Return
  return(mesh_clip_df)
  
})

# Save the new ones
write_csv(area_intersections, here::here("local_data/new_areas_mesh_weights.csv"))


# # Save these out for lookup tables
# write_csv(inshore_intersections, here::here("local_data/inshore_areas_mesh_weights.csv"))
# write_csv(offshore_intersections, here::here("local_data/offshore_areas_mesh_weights.csv"))
Code
# Load what we did above
# inshore_zones <- read_csv(here::here("local_data/inshore_areas_mesh_weights.csv"))
# offshore_zones <- read_csv(here::here("local_data/offshore_areas_mesh_weights.csv"))
regional_assignments <- read_csv(here::here("local_data/new_areas_mesh_weights.csv"))


# # Put the inshore and offshore together
# regional_assignments <- bind_rows(
#     dplyr::select(inshore_zones, area_id = SHORT_NAME, elem, p1, p2, p3, rel_area),
#     dplyr::select(offshore_zones, area_id = Region, elem, p1, p2, p3, rel_area)) %>% 
#   mutate(rel_area = as.numeric(rel_area))


# From intersection we need to add areas
# mutate(crop_t, rel_area = st_area(crop_t), .before = "Join_Count") %>% 
#   select(SHORT_NAME, Id_1, FULL_NAME, elem, p1, p2, p3, rel_area) %>% 
regional_assignments %>% 
  st_drop_geometry() %>% 
  head() %>% 
  gt::gt() %>% gt::tab_header(
    title = "Intersection Insight gained:",
    subtitle = "Information needed to subset from NetCDF and weight appropriately for regional statistics")
Intersection Insight gained:
Information needed to subset from NetCDF and weight appropriately for regional statistics
area_id         elem   p1     p2     p3     rel_area
central maine   50460  25714  25713  25412   3801345
central maine   50461  25715  25714  25412  12622406
central maine   50462  25715  25412  25716   1845053
central maine   51042  26015  26014  25713   2865162
central maine   51043  26015  25713  25714  12898921
central maine   51044  26016  26015  25714  14960550

Get triangle-specific averages of nodal values

For surface and bottom temperature the values are stored at the triangle vertices, or nodes. For regional statistics like a regional average temperature, we need to average the three nodes of each triangle to get a value representative of its interior.

This step loops through every mesh element that falls within any of our regions and computes that average once per element. The results are stored in a named list that can be accessed later.
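
As a concrete example of what happens for one triangle (the function below wraps exactly this logic and maps it over every unique element), using the first row of the lookup table:

Code
# One row of the lookup: a single mesh triangle and its three vertex node indices
tri <- regional_assignments[1, ]

# Pull the full daily surface_t series at each vertex node
p1_ts <- ncvar_get(fvcom_yrx, "surface_t", start = c(tri$p1, 1), count = c(1, -1))
p2_ts <- ncvar_get(fvcom_yrx, "surface_t", start = c(tri$p2, 1), count = c(1, -1))
p3_ts <- ncvar_get(fvcom_yrx, "surface_t", start = c(tri$p3, 1), count = c(1, -1))

# The element's daily value is the mean of its three vertices
elem_ts <- (p1_ts + p2_ts + p3_ts) / 3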

Code
# Now we want to pull all values along the time dimension 
# for the key nodes we need: the ones that form the vertices of every unique
# element left after the st_intersection()



#' @title FVCOM Element Average from Vertices
#' 
#' @description Get the average value from the relevant vertices, along  
#' the time dimension, for each FVCOM mesh triangle.
#'
#' @param fvcom_mesh_trios dataframe of mesh elements and their vertex node ids (elem, p1, p2, p3)
#' @param nc open ncdf4 connection to a daily surface/bottom file
#' @param nc_varname name of the NetCDF variable to average (e.g. "surface_t")
#'
#' @return named list of element-averaged timeseries, one entry per unique element
#' @export
#'
#' @examples
regional_from_nodes <- function(fvcom_mesh_trios, nc, nc_varname = "surface_t"){
  
  
  # Take the unique elements from our table:
  unique_elems <- distinct(fvcom_mesh_trios, elem) %>% pull(elem)
  
  # Make a named list
  unique_elems <- setNames(unique_elems, unique_elems)
  
  # Iterate through each unique element (triangle element, p1, p2, p3)
  # Average the nodes to get value for each element
  triangle_temps <- map(unique_elems, function(elem_id){
    
    # Slice the row for that element, get distinct in case >1
    elem_df <- filter(fvcom_mesh_trios, elem == elem_id) %>% 
      distinct(elem, p1, p2, p3)
    
    # Get element id
    elem_num <- elem_df[[1, "elem"]]
    
    # Pull surface_t for the three nodes
    p1_ts <- ncvar_get(
      nc, 
      varid = nc_varname, 
      start = c(elem_df[[1, "p1"]], 1), 
      count = c(1, -1))
    p2_ts <- ncvar_get(
      nc, 
      varid = nc_varname, 
      start = c(elem_df[[1, "p2"]], 1), 
      count = c(1, -1))
    p3_ts <- ncvar_get(
      nc, 
      varid = nc_varname, 
      start = c(elem_df[[1, "p3"]], 1), 
      count = c(1, -1))
    
    # Get Averages
    elem_avg_var <- (p1_ts + p2_ts + p3_ts) / 3
    
    # Return the element-averaged timeseries for this triangle
    return(elem_avg_var)
    
  })
  
  # Return the named list, one averaged timeseries per element
  return(triangle_temps)
  
}
Code
# Test run: element averages for the 1978 file
triangle_avg_1978 <- regional_from_nodes(
  fvcom_mesh_trios = regional_assignments, 
  nc = fvcom_yrx, 
  nc_varname = "surface_t")
# Sys.time()

Area-weighted Regional Averages

Within each polygon overlay, we weight the triangle averages we just estimated by their relative areas. This gives the average value for the entire polygon, with each component triangle contributing in proportion to its area inside the polygon.
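
The function below does this by weighting each element's full daily timeseries and summing with reduce(); conceptually, with a triangles-by-days matrix of values and a vector of areas, it is a single matrix product. A toy sketch with made-up numbers:

Code
# Toy example: 3 triangles x 5 days of made-up values
tri_ts    <- matrix(rnorm(15, mean = 8), nrow = 3)  # rows = triangles, cols = days
tri_areas <- c(1.2e6, 0.4e6, 2.5e6)

# Area-weighted regional mean for every day at once
as.numeric(crossprod(tri_areas, tri_ts)) / sum(tri_areas)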

Code
# Take the code above and calculate the weighted averages:
# function should take:
# 1. region overlay dataframe, containing relative areas for weights
# 2. triangle-specific values





#' @title Regional Averages from FVCOM-Polygon Intersection
#' 
#' @description Compute the area-weighted regional mean timeseries for one 
#' polygon from its mesh intersection table and the element-averaged values.
#'
#' @param mesh_poly_intersection intersection table for one polygon (elem ids + rel_area weights)
#' @param regional_means named list of element-averaged timeseries (from regional_from_nodes)
#' @param nc open ncdf4 connection, used to pull the time dimension
#'
#' @return dataframe with columns time and regional_mu
#' @export
#'
#' @examples
fvcom_regional_averages <- function(mesh_poly_intersection, regional_means, nc){
  
  # Get the elements
  poly_elems <- as.character(mesh_poly_intersection$elem)

  # pull out the weights, name them
  poly_wts <- as.numeric(mesh_poly_intersection$rel_area) %>% 
    setNames(poly_elems)
  
  # Weight each timeseries by area, then sum them all
  # Get the timeseries that go with the triangles
  # multiply the relevant relative areas
  poly_wtd_ts <- purrr::map(poly_elems, function(x){
    wtd_vals <- regional_means[[x]] * poly_wts[[x]]
    return(wtd_vals)}) %>% 
    reduce(.x = ., .f = `+`)
  
  # Divide by total area
  poly_tot_area <- sum(as.numeric(mesh_poly_intersection$rel_area))
  
  # Add the time dimension and return as dataframe
  poly_regional <- data.frame(
    "time"     = as.Date(ncvar_get(nc, "Times")),
    "regional_mu" = poly_wtd_ts / poly_tot_area)
  return(poly_regional)
  
}
Code
#------

# Note: the area_id values were lowercased above, so filtering on "Eastern Maine"
# returns zero rows; use the lowercase id when testing a single area:
# 
# fvcom_regional_averages(
#       mesh_poly_intersection = filter(regional_assignments, area_id == "eastern maine"),
#       regional_means = triangle_avg_1978, 
#       nc = fvcom_yrx)




# Now we loop over areas
poly_avgs_1978 <-  map_dfr(
  .x = regional_assignments %>% split(.$area_id),
  .f = ~fvcom_regional_averages(
    mesh_poly_intersection = .x,
    regional_means = triangle_avg_1978, 
    nc = fvcom_yrx),
  .id = "area_id"
)


mutate(poly_avgs_1978, time = as.Date(time)) %>% 
ggplot(aes(time, regional_mu, color = area_id)) +
  geom_line(alpha = 0.4, linewidth = 0.5) +
  labs(title = "Average SST for Lob-ECOL Polygons: 1978 Test")

Run for All Years, Save.

Before this step, we need the st_intersection results from above, which tell us which mesh triangles to average for each area and how to weight them.

From there we just loop over the yearly files, opening each one and building the dataframes for each area.

Everything below could also be done in one step with xarray instead of a loop.

Surface Temperature Processing

Code
# Process Surface Temperature for all the regions:
fvcom_var_name <- "surface_t"


# Time it
Sys.time()
all_region_surface_temp <- map_dfr(fvcom_surfbot_files, function(nc_x){
  
  # 1. Open the netcdf:
  yr_x_fvcom <- nc_open(nc_x)
  
  # 2. Get the per-triangle average values for this year's file
  triangle_avgs <- regional_from_nodes(
    fvcom_mesh_trios = regional_assignments, 
    nc = yr_x_fvcom, 
    nc_varname = fvcom_var_name)
  
  
  # Get the regional averages for each region/group
  poly_avgs_yr_x <-  regional_assignments %>% 
    split(.$area_id) %>% 
    map_dfr(
    .x = .,
    .f = ~fvcom_regional_averages(
      mesh_poly_intersection = .x,
      regional_means = triangle_avgs, 
      nc = yr_x_fvcom),
    .id = "area_id"
  )
  
  # Close the netcdf connection
  nc_close(yr_x_fvcom)
  
  # Return the regional averages
  return(poly_avgs_yr_x)
  
})

# time completion
Sys.time()



# Plot
all_region_surface_temp %>%
  mutate(time = as.Date(time)) %>%
  ggplot() +
  geom_line(aes(time, regional_mu, color = area_id),
            linewidth = 0.4, alpha = 0.4)

Bottom Temperature Processing

Code
# Process Bottom Temperature for all the regions:
fvcom_var_name <- "bottom_t"


# Time it
Sys.time()
all_region_bottom_temp <- map_dfr(fvcom_surfbot_files, function(nc_x){
  
  # 1. Open the netcdf:
  yr_x_fvcom <- nc_open(nc_x)
  
  # 2. Get the per-triangle average values for this year's file
  triangle_avgs <- regional_from_nodes(
    fvcom_mesh_trios = regional_assignments, 
    nc = yr_x_fvcom, 
    nc_varname = fvcom_var_name)
  
  
  # Get the regional averages for each region/group
  poly_avgs_yr_x <-  regional_assignments %>% 
    split(.$area_id) %>% 
    map_dfr(
    .x = .,
    .f = ~fvcom_regional_averages(
      mesh_poly_intersection = .x,
      regional_means = triangle_avgs, 
      nc = yr_x_fvcom),
    .id = "area_id"
  )
  
  # Close the netcdf connection
  nc_close(yr_x_fvcom)
  
  # Return the regional averages
  return(poly_avgs_yr_x)
  
})

# time completion
Sys.time()



# Plot
all_region_bottom_temp %>% 
  mutate(time = as.Date(time)) %>% #distinct(area_id)
  filter(area_id %in% c("gom_gbk", "sne")) %>% 
  ggplot() +
  geom_line(aes(time, regional_mu, color = area_id),
            linewidth = 0.4, alpha = 0.4)

Exporting

Combine surface and bottom outputs into one table for export:

Code
# Combine surface and bottom to save:

regional_temperature_collection <- left_join(
  all_region_surface_temp %>% rename(surface_t = regional_mu),
  all_region_bottom_temp %>% rename(bottom_t = regional_mu))



# Export the set:
write_csv(regional_temperature_collection, 
          str_c(lob_ecol_path, "FVCOM_processed/area_timeseries/new_regions_fvcom_temperatures_daily.csv"))

Review the collection

Code
regional_temperature_collection <- read_csv(
  str_c(lob_ecol_path, "FVCOM_processed/area_timeseries/new_regions_fvcom_temperatures_daily.csv")
)


ggplot(filter(regional_temperature_collection, str_detect(area_id,"maine")), aes(time)) +
  geom_line(aes(y = surface_t, color = "surface temperature")) +
  geom_line(aes(y = bottom_t, color = "bottom temperature")) +
  scale_x_date(limits = as.Date(c("2016-01-01", "2019-12-31"))) +
  facet_wrap(~area_id, ncol = 1) +
  theme(legend.position = "bottom")

Code
# # This is what the collection export used to be:
# 
# # Load the collection if necessary: (should look like this)
# regional_temperatures_collection <- read_csv(str_c(lob_ecol_path, "FVCOM_processed/area_timeseries/all_regions_fvcom_temperatures_daily.csv"))
# 
# glimpse(regional_temperatures_collection)
# 
# # Plot to look
# fvcom_regional_temperatures %>% 
#   filter(area_id %in% c("GOM_GBK", "SNE")) %>% 
#   ggplot() +
#   geom_line(aes(time, surface_t, color = "Surface Temperature"), linewidth = 0.8, alpha = 0.6) +
#   geom_line(aes(time, bottom_t, color = "Bottom Temperature"), linewidth = 0.8, alpha = 0.6) +
#   scale_color_gmri() +
#   facet_wrap(~area_id, nrow = 2)
# 
# 
# # # Exporting
# # write_csv(
# #   fvcom_regional_temperatures,
# #   str_c(lob_ecol_path, "FVCOM_processed/area_timeseries/all_regions_fvcom_temperatures_daily.csv"))

Running Single Years 2012/2017-2019

On the first pass I was missing a number of files, which were added later. This section covers processing that subset of years and adding them back to the collection.

It is also a good example of what the process looks like for a single NetCDF file.

Code
# Paths to single years
fvcom_2012_file <- str_c(fvcom_path, "gom3_2012.nc")
fvcom_2017_file <- str_c(fvcom_path, "gom3_2017.nc")
fvcom_2018_file <- str_c(fvcom_path, "gom3_2018.nc")
fvcom_2019_file <- str_c(fvcom_path, "gom3_2019.nc")




# 1. Open the netcdf file to update/add:
fvcom_year_nc <- nc_open(fvcom_2019_file)


# 2. Get the per-triangle average values for this year's file
stemp_triangle_avgs <- regional_from_nodes(
  fvcom_mesh_trios = regional_assignments, 
  nc = fvcom_year_nc, 
  nc_varname = "surface_t")

# bottom as well
btemp_triangle_avgs <- regional_from_nodes(
  fvcom_mesh_trios = regional_assignments, 
  nc = fvcom_year_nc, 
  nc_varname = "bottom_t")


# 3. Get the regional averages for each region/group
stemp_poly_avgs <-  regional_assignments %>% 
  split(.$area_id) %>% 
  map_dfr(
  .x = .,
  .f = ~fvcom_regional_averages(
    mesh_poly_intersection = .x,
    regional_means = stemp_triangle_avgs, 
    nc = fvcom_year_nc),
  .id = "area_id")

# Btemp
btemp_poly_avgs <-  regional_assignments %>% 
  split(.$area_id) %>% 
  map_dfr(
  .x = .,
  .f = ~fvcom_regional_averages(
    mesh_poly_intersection = .x,
    regional_means = btemp_triangle_avgs, 
    nc = fvcom_year_nc),
  .id = "area_id")



# Reshape/format them to put together
# Put them into one table:
yearx_regional_temperatures <- left_join(
  stemp_poly_avgs  %>% 
    mutate(time = as.Date(time)) %>% 
    rename(surface_t = regional_mu), 
  btemp_poly_avgs  %>% 
    mutate(time = as.Date(time)) %>% 
    rename(bottom_t = regional_mu))




# Old export code, from original areas

# # Load the collection if necessary:
# fvcom_regional_temperatures <- read_csv(str_c(lob_ecol_path, "FVCOM_processed/area_timeseries/all_regions_fvcom_temperatures_daily.csv"))
# 
# 
# # Put them into one table to save them:
# fvcom_regional_temperatures <- bind_rows(fvcom_regional_temperatures, yearx_regional_temperatures)
# 
# # Plot to look
# fvcom_regional_temperatures %>% 
#   filter(area_id %in% c("GOM_GBK", "SNE")) %>% 
#   ggplot() +
#   geom_line(aes(time, surface_t, color = "Surface Temperature"), linewidth = 0.8, alpha = 0.6) +
#   geom_line(aes(time, bottom_t, color = "Bottom Temperature"), linewidth = 0.8, alpha = 0.6) +
#   scale_color_gmri() +
#   facet_wrap(~area_id, nrow = 2)
# 
# 
# # # Exporting
# # write_csv(
# #   fvcom_regional_temperatures,
# #   str_c(lob_ecol_path, "FVCOM_processed/area_timeseries/all_regions_fvcom_temperatures_daily.csv"))

Checking Model Resolution: Nodes/Element Density

As another layer of information, it may be useful to know how many nodes/elements (proxies for model resolution) contributed to each regional mean estimate (see the note after the counting code below):

Code
# Element count and average clipped-triangle area within each area
area_resolutions <- regional_assignments %>% 
  group_by(area_id) %>% 
  summarise(n_elems = n_distinct(elem),
            avg_area_km2 =  mean(rel_area)/1e6) 

# We also probably want the number of nodes, and center elements
# we can get the center coordinates this way
gom3_mesh_elems <- data.frame(
  "lonc" = ncvar_get(fvcom_yrx, "lonc"),
  "latc" = ncvar_get(fvcom_yrx, "latc")) %>% 
  mutate(elem = row_number()) %>% 
  st_as_sf(
    coords = c("lonc", "latc"), 
    crs = st_crs(gom3_mesh), 
    remove = F)

# and the nodes this way
gom3_mesh_nodes <- data.frame(
  "lon" = ncvar_get(fvcom_yrx, "lon"),
  "lat" = ncvar_get(fvcom_yrx, "lat")) %>% 
  mutate(node = row_number()) %>% 
  st_as_sf(
    coords = c("lon", "lat"), 
    crs = st_crs(gom3_mesh), 
    remove = F)

# validate we have the coordinates right
ggplot() +
  geom_sf(data = gom3_mesh_nodes, shape = 3, size = 0.2, alpha = 0.4, color = "gray20") +
  geom_sf(data = gom3_mesh_elems, shape = 3, size = 0.2, alpha = 0.2, color = "orange")

Code
# Use st_crop (each area's bounding box) to count the points within each one
area_points <- map_dfr(
  .x = all_areas_trans, 
  .f = possibly(function(strata_x){
  n_nodes <- st_crop(x = gom3_mesh_nodes, y = strata_x) %>% 
      nrow()
  n_centers <- st_crop(x = gom3_mesh_elems, y = strata_x) %>% 
      nrow()
  tibble(
    "n_nodes" = n_nodes,
    "n_centers" = n_centers)
  }, 
  otherwise = tibble("n_nodes" = 0, "n_centers" = 0)), 
  .id = "area_id")






# Join them together:
area_resolution_data <- left_join(
  mutate(area_resolutions, area_id = as.character(area_id)),
  y = area_points)

# Check the distribution
#hist(area_resolution_data$avg_area_km2)

# Join the avg resolution in each strata
all_areas %>% 
  left_join(area_resolution_data) %>% 
  ggplot() +
  #geom_sf(data = gom3_mesh, color = "gray80", alpha = 0.2) +
  geom_sf(aes(fill = avg_area_km2)) +
  facet_wrap(~area_id) +
  scale_fill_distiller(palette = "RdYlGn")
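
A side note on the counts above: st_crop keeps every point inside each area's bounding box, so the node/element tallies can over-count for irregular polygons. If exact within-polygon counts were ever needed, st_intersects is one option; a minimal sketch using the layers built above:

Code
# Exact within-polygon counts (polygon test rather than bounding-box crop)
exact_counts <- map_dfr(all_areas_trans, function(strata_x){
  # Match CRS to the point layers before the polygon test
  strata_x <- st_transform(strata_x, st_crs(gom3_mesh_nodes))
  tibble(
    n_nodes   = sum(lengths(st_intersects(gom3_mesh_nodes, strata_x)) > 0),
    n_centers = sum(lengths(st_intersects(gom3_mesh_elems, strata_x)) > 0))
  }, .id = "area_id")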

Code
# # Save that out: legacy code
# write_csv(strata_resolution_data, here::here("local_data", "jkipp_areas_fvcom_elem_resolution.csv"))