Approach for Fast Interpolation of Values within FVCOM Mesh
Published
October 7, 2024
Code
# Packages
{
library(raster)        # netcdf data as raster
library(sf)            # vector spatial mapping/operations
library(fvcom)         # fvcom mesh and variable extractions
library(ncdf4)         # netcdf support
library(tidyverse)     # data wrangling and plotting
library(gmRi)          # color schemes and cloud storage paths
library(patchwork)     # plot arrangement
library(rnaturalearth) # coastlines and state polygons
library(geometry)      # barycentric coordinates
library(Matrix)        # matrix algebra
library(sysfonts)      # font support
}

# Paths + conflicts
conflicted::conflict_prefer("select", "dplyr")
conflicted::conflict_prefer("filter", "dplyr")
proj_path <- cs_path("mills", "Projects/Lobster ECOL")
# fvcom_out <- str_c(proj_path, "FVCOM_support/")
source(here::here("R/FVCOM_Support.R"))
theme_set(theme_bw() + map_theme())

# Load a Maine polygon
maine <- ne_states("united states of america", returnclass = "sf") %>%
  filter(postal %in% c("ME", "NH", "MA", "CT", "RI", "VT"))

# GOM EPU to clip
res_shapes <- cs_path("res", "Shapefiles")
epu_path <- str_c(res_shapes, "EPU/")
gom_poly <- read_sf(str_c(epu_path, "individual_epus/GOM.geojson"))
Code
# Use GMRI style
use_gmri_style_rmd()
Approaches to Interpolate Values within Triangular Mesh
Calculating values for points within the FVCOM mesh involves some form of interpolation. Potential approaches either use only local information from the three surrounding nodes (e.g., nearest neighbor, linear interpolation, barycentric coordinates) or model how variance changes with distance using information from many nodes (e.g., inverse distance weighting (IDW), kriging).
This quarto doc explores linear interpolation and barycentric interpolation for estimating values at many point locations across the FVCOM mesh area.
Both approaches take information from the three surrounding nodes and interpolate a value, either from a linear combination of the vertex coordinates or through barycentric coordinate weights.
Both are deterministic approaches that should perform quickly without undue computational burden, and both avoid decisions around parameter tuning.
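As a minimal standalone illustration of the barycentric idea (a made-up unit triangle, not the FVCOM mesh), `geometry::cart2bary()` returns three weights that sum to one and reproduce the query point as a weighted combination of the triangle's vertices; the same weights then interpolate any value defined at those vertices:

```r
library(geometry)

# Hypothetical triangle (one vertex per row) and a query point inside it
tri <- rbind(c(0, 0), c(1, 0), c(0, 1))
pt  <- matrix(c(0.2, 0.3), nrow = 1)

# Barycentric coordinates of the point: one weight per vertex, summing to 1
w <- cart2bary(X = tri, P = pt)
w         # 0.5 0.2 0.3
sum(w)    # 1

# The weighted combination of vertices recovers the point itself;
# swapping the vertex coordinates for node values gives the interpolation
w %*% tri # 0.2 0.3
```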
The first step for either workflow is identifying the triangle that surrounds each point we wish to interpolate, and extracting from the source NetCDF file the variable values at that triangle's three nodes.
To demonstrate the approach, the following 100 points have been simulated at random. They fall within the time period when FVCOM's GOM3 mesh was in use, which is the mesh we will use for testing below.
Code
# Make a dataframe of random points
set.seed(123)
point_number <- 100
pts_df <- data.frame(
    pt_id = str_c("station_", seq(1, point_number)),
    lon   = rnorm(point_number, mean = -68.5, sd = 0.3),
    lat   = rnorm(point_number, mean = 43.2, sd = 0.3),
    dtime = seq.Date(as.Date("2014-01-09"), as.Date("2016-06-12"), length.out = point_number)) %>%
  st_as_sf(coords = c("lon", "lat"), crs = 4326, remove = FALSE)
The fvcom package (https://github.com/BigelowLab/fvcom), created by Ben Tupper, provides the fvcom::get_mesh_geometry() function for constructing a simple feature collection of the FVCOM mesh that can be used for spatial operations. This simple feature collection importantly includes point and element ID numbers that can be used to label which triangular regions in the mesh should be associated with the points we wish to interpolate at. FVCOM monthly-average hindcast data (http://www.smast.umassd.edu:8080/thredds/catalog/models/fvcom/NECOFS/Archive/Seaplan_33_Hindcast_v1/monthly_mean/catalog.html) will be used as our data source for constructing and using the mesh geometry.
Code
# Load some daily FVCOM that we downloaded and averaged ahead of time
# These aren't 100% reliable but they will work for demonstration's sake
daily_fpath <- cs_path("mills", "Projects/Lobster ECOL/FVCOM_processed/ignore_manually_downloaded/ignore_GOM3_daily")
gom3_2014 <- read_csv(str_c(daily_fpath, "GOM3_daily_surfbottemp_2014_01.csv"))
gom3_oneday <- gom3_2014 %>% filter(date == "2014-01-01")

# Get some file names to load monthly files
gom3_files <- monthly_fvcom_inventory(
  start_yr = 2014,
  end_yr = 2014,
  fvcom_folder = cs_path("res", "FVCOM/monthly_means"),
  fvcom_vers = "GOM3")

# Open one
gom3_early <- nc_open(gom3_files[[1]])

# Get the mesh itself as a simple feature collection
gom3_mesh <- get_mesh_geometry(gom3_early, what = 'lonlat')
Below is a map of our simulated test locations and where they fall within the FVCOM mesh, in the context of the Gulf of Maine Ecological Production Unit.
Code
# Prep CRS to clip
gom_poly <- st_transform(gom_poly, st_crs(gom3_mesh))

# Flag the locations that are within the domain
gom_mesh <- gom3_mesh %>%
  st_join(gom_poly, join = st_within) %>%
  drop_na(EPU) %>%
  dplyr::select(-c(EPU, Shape_Leng, Shape_Area, source))

# Plot them all
ggplot() +
  geom_sf(data = gom_mesh, alpha = 0.1, linewidth = 0.1) +
  geom_sf(data = maine) +
  geom_sf(data = pts_df) +
  coord_sf(xlim = c(-71, -67.1), ylim = c(41.4, 44.4)) +
  labs(title = "Simulated sample locations")
Step 1: Triangle Overlap Point-Matching
Since the mesh is static, we can use any time index to pull node locations and identify which triangles are associated with which points.
This operation can be performed for all points in one step using sf::st_join(, join = st_within).
Once the three relevant nodes are identified we can calculate barycentric coordinates which can be used to weight the values at each node.
Code
# We can use the mesh from Ben's function as the lookup table,
# and elem numbers as triangle ID's.
# A bonus of going this route is that node and element ID numbers match ncvar_get index order.

# The mesh
gom3_mesh <- get_mesh_geometry(gom3_early, what = 'lonlat')

# We can also pull lon + lat from the netcdf and index that way
gom3_coords <- data.frame(
    "lon" = ncvar_get(gom3_early, "lon"),
    "lat" = ncvar_get(gom3_early, "lat")) %>%
  mutate(node_id = row_number(), .before = "lon")

# Identify the triangles that overlap each point:
# Use st_join to assign elem to the points we're looking up
pts_assigned <- st_join(
  st_transform(pts_df, st_crs(gom3_mesh)),
  gom3_mesh,
  join = st_within)

#### Getting Node Coordinates:
# Option 1: Find the overlapping triangle and its nodes, then look up the
#   coords by p1, p2, p3 index number. Following the join the geometry is
#   still the point, but we now have columns for the node and center index numbers.
# Option 2: Use the triangle geometries themselves to pull node coordinates
#   from the sf geometries. What we really want are the point coordinates for
#   barycentric distances; remember, lon & lat come in pairs so the index number grabs both.

# Using the values added to pts_assigned we can subset the relevant triangles
triangle_matches <- gom3_mesh[pts_assigned$elem, ]
The following plot/code verifies that the random points indeed fall within the triangle they are matched to, and that the node ID’s from that triangle match the index order of ncvar_get() for lat/lon based variables.
Code
#### Sandbox Testing:
# Are the coordinates even different if projected to NAD83 or not? Don't think so...
# Ignore for now
# pts_assigned %>% st_coordinates() %>% bind_cols(pts_df) %>% transmute(x_diff = lon - X, y_diff = lat - Y)

# Nodes for one triangle
# st_coordinates(triangle_matches[1,])[1:3,]

# Do the node IDs also match the daily data row indices?
# They do in space; the plot below checks a random point. Seems good!
row_idx <- sample(c(1:nrow(pts_assigned)), size = 1, replace = F)
pt1_id <- triangle_matches[row_idx, "p1"] %>% st_drop_geometry() %>% as.numeric()
pt2_id <- triangle_matches[row_idx, "p2"] %>% st_drop_geometry() %>% as.numeric()
pt3_id <- triangle_matches[row_idx, "p3"] %>% st_drop_geometry() %>% as.numeric()

ggplot() +
  geom_point(data = gom3_oneday[pt1_id, ], aes(lon, lat, color = str_c("Node: ", pt1_id)), size = 3) +
  geom_point(data = gom3_oneday[pt2_id, ], aes(lon, lat, color = str_c("Node: ", pt2_id)), size = 3) +
  geom_point(data = gom3_oneday[pt3_id, ], aes(lon, lat, color = str_c("Node: ", pt3_id)), size = 3) +
  geom_point(data = pts_assigned[row_idx, ], aes(lon, lat, color = "Test Point Location"), size = 4) +
  scale_color_gmri() +
  geom_sf(data = triangle_matches[row_idx, ], color = "black", fill = NA, size = 1) +
  labs(color = "Point", title = "Verify triangle overlap and node indexing of random point")
Code
# But we need to make sure each distance goes with the right point.
# Are they in order with st_coordinates?
# This is how we could match to lon/lat coords:
# pt_matches_x <- triangle_matches[row_idx, c("p1", "p2", "p3")] %>%
#   st_drop_geometry() %>%
#   t() %>%
#   c()
# And they do seem to match:
# gom3_coords[pt_matches_x,]$lon == st_coordinates(triangle_matches[row_idx,])[1:3,"X"]
# gom3_coords[pt_matches_x,]$lat == st_coordinates(triangle_matches[row_idx,])[1:3,"Y"]
Add node details to points table
After confirming that the point IDs from fvcom::get_mesh_geometry() correctly match the index order of ncdf4::ncvar_get(), we can confidently use those indices to add the lat/lon coordinates of those nodes directly into the dataframe holding the points we wish to interpolate at.
This is purely organizational, and keeps all information together in a single object.
Code
# Objective:
# Get the appropriate x1, x2, x3, y1, y2, y3 values into "pts_assigned".
# We can use either the mesh or the lon/lat table; the table is probably faster,
# though hopping between the two risks mixing up index order.

#### Option 2: Lon/Lat Table
gom3_coords <- data.frame(
    "lon" = ncvar_get(gom3_early, "lon"),
    "lat" = ncvar_get(gom3_early, "lat")) %>%
  mutate(node_id = row_number(), .before = "lon")

# This will add the relevant coords to the table
pts_with_nearnodes <- pts_assigned %>%
  st_drop_geometry() %>%
  mutate(
    x1 = gom3_coords[p1, "lon"],
    x2 = gom3_coords[p2, "lon"],
    x3 = gom3_coords[p3, "lon"],
    y1 = gom3_coords[p1, "lat"],
    y2 = gom3_coords[p2, "lat"],
    y3 = gom3_coords[p3, "lat"])
Method 1: Triangulation with Linear Interpolation
The following approach follows code shared by Dr. Siqi Li of UMASS Dartmouth: https://github.com/SiqiLiOcean/matFVCOM/blob/main/interp_2d_calc_weight.m
This is the method they use when interpolating from one FVCOM mesh to another, or for extracting point estimates from the mesh.
Their code identifies the correct triangle differently, but the matrix algebra used to solve for the weights is the same.
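Concretely, for a triangle with vertex coordinates $(x_1, y_1)$, $(x_2, y_2)$, $(x_3, y_3)$ and a point $(x, y)$ inside it, the weights $w_1, w_2, w_3$ solve the linear system

$$
\begin{bmatrix} x_1 & x_2 & x_3 \\ y_1 & y_2 & y_3 \\ 1 & 1 & 1 \end{bmatrix}
\begin{bmatrix} w_1 \\ w_2 \\ w_3 \end{bmatrix} =
\begin{bmatrix} x \\ y \\ 1 \end{bmatrix}
$$

The first two rows reproduce the point as a linear combination of the vertices, and the third row forces the weights to sum to one, which is why these weights coincide with the barycentric coordinates used in Method 2 below. The toy example that follows works through the same inverse-and-multiply steps with arbitrary numbers.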
Code
# Solve A %*% w = b for the weights w, via the inverse of A.
# Original 3x3 matrix; for triangle nodes each column would be (x, y, 1)
(a_mat <- matrix(c(8, 3, 4, 1, 5, 9, 6, 7, 2), nrow = 3, byrow = T))

# 3x1 matrix for the point location within the triangle
(b_mat <- matrix(c(3, 1, 12), nrow = 3, byrow = T))

# Inverse of matrix A
a_i <- solve(a_mat)

# Weights
wts <- a_i %*% b_mat

# Show that the weights can be used to work back to the point coordinates
a_mat %*% wts
The following function will perform a rowwise application of the above steps, adding the weights to the dataframe. This should make solving the interpolation a vectorized operation.
Code
# Function to add the linear interpolation weights based on node coordinates
triangulation_linear_weighting <- function(pts_sf, fvcom_mesh){

  # Identify the triangles that overlap each point:
  # Use st_join to assign elem to the points we're looking up
  pts_assigned <- st_join(
      st_transform(pts_sf, st_crs(fvcom_mesh)),
      fvcom_mesh,
      join = st_within) %>%
    drop_na(elem)

  # Iterate over the rows to add weights:
  pts_weighted <- pts_assigned %>%
    base::split(., seq_len(nrow(.))) %>%
    purrr::map_dfr(function(pt_assigned){

      # Subset the relevant triangle from the st_join info
      triangle_match <- fvcom_mesh[pt_assigned$elem, ]

      # Build matrices for the point to interpolate & the surrounding nodes:
      # columns 1:3 of st_coordinates() give X, Y, and the ring index (all 1's),
      # so the transpose is the (x, y, 1) system matrix
      node_vertices <- t(st_coordinates(triangle_match[1, ])[1:3, 1:3])

      # Make a matrix from the point:
      point_coords <- matrix(c(st_coordinates(pt_assigned[1, ]), 1), nrow = 3)

      #### For Linear Interpolation:
      # Get the inverse of the matrix
      inverse_coordmat <- solve(node_vertices)

      # Solve for the weights
      node_wts <- inverse_coordmat %*% point_coords %>%
        t() %>%
        as.data.frame() %>%
        setNames(c("p1_wt", "p2_wt", "p3_wt"))

      # Return with the dataframe
      bind_cols(pt_assigned, node_wts)

    }) # End rowwise

  return(pts_weighted)
}

# Run the job
pts_weighted <- triangulation_linear_weighting(
  pts_sf = pts_df,
  fvcom_mesh = gom3_mesh)
Code
#### Apply Interpolation Step ####
interpolate_from_weights <- function(df_weighted, fvcom_nc, fvcom_varid, fvcom_siglev = 1, var_out = "interp_val"){

  # Get the values of the variable of interest as a vector
  node_vals <- ncvar_get(nc = fvcom_nc, varid = fvcom_varid)[, fvcom_siglev]

  # Weighted sum of the three node values
  df_weighted %>%
    mutate(
      {{var_out}} := node_vals[p1] * p1_wt + node_vals[p2] * p2_wt + node_vals[p3] * p3_wt)
}

# Run the interpolation (sigma level 1 = surface)
pts_interp <- interpolate_from_weights(
  df_weighted = pts_weighted,
  fvcom_nc = gom3_early,
  fvcom_varid = "temp",
  fvcom_siglev = 1,
  var_out = "surf_temp")
This is what the interpolated values look like within their respective triangulations:
Code
# Values at the nodes, to compare against
# Get values and index them out
surface_temperature <- ncvar_get(nc = gom3_early, varid = "temp")[, 1]
t_nodes <- unlist(st_drop_geometry(pts_assigned)[, c("p1", "p2", "p3")])
actual_t <- data.frame(
  "lon" = gom3_coords[t_nodes, "lon"],
  "lat" = gom3_coords[t_nodes, "lat"],
  "sst" = surface_temperature[t_nodes])

# Plot the map:
ggplot() +
  geom_sf(data = triangle_matches, fill = NA) +
  geom_point(data = actual_t, aes(lon, lat, color = sst)) +
  geom_sf(data = pts_interp, aes(color = surf_temp)) +
  scale_color_distiller(palette = "RdBu") +
  theme_dark()
Nearshore Area Regular Grid Test
Code
# Take the nearshore area of the Gulf of Maine:
inshore_sf <- st_read(str_c(proj_path, "Spatial_Defs/12nm_poly_statarea"), quiet = T)

# Subset to the Maine area
inshore_gom <- st_intersection(gom_poly, st_transform(inshore_sf, st_crs(gom_poly)))
# plot(inshore_gom$geometry)

# Create a ~4km (1/24th degree) grid using its min/max extent
# st_bbox(st_transform(gom_poly), crs = 4326)
# Clip to points within that area only
inshore_reg_grid <- expand.grid(
    lon = seq(-71.1, -67.10, (1/24)),
    lat = seq(41.3, 44.62, (1/24))) %>%
  as.data.frame() %>%
  mutate(coord_pair = row_number(), .before = "lon") %>%
  st_as_sf(coords = c("lon", "lat"), crs = 4326, remove = FALSE) %>%
  st_join(st_transform(inshore_gom, crs = 4326), join = st_within) %>%
  drop_na(TARGET_FID)

# # Plot the coverage:
# ggplot(inshore_reg_grid) +
#   geom_sf(size = 0.2) +
#   labs(title = "1/24th Degree Inshore Area Target Resolution")

# Get weights
inshore_weights <- triangulation_linear_weighting(
  pts_sf = inshore_reg_grid,
  fvcom_mesh = gom3_mesh)

# Interpolate (sigma level 45 is the bottom layer here)
inshore_interp <- interpolate_from_weights(
  df_weighted = inshore_weights,
  fvcom_nc = gom3_early,
  fvcom_varid = "temp",
  fvcom_siglev = 45,
  var_out = "bot_temp")

# Plot the map:
ggplot() +
  geom_sf(data = inshore_interp, aes(color = bot_temp), size = 1, shape = 15) +
  geom_sf(data = maine) +
  scale_color_distiller(palette = "RdBu") +
  theme_dark() +
  map_theme() +
  coord_sf(xlim = c(-71.5, -67.8), ylim = c(41.5, 44.6)) +
  labs(title = "1/24th Degree Inshore Area Target Resolution")
Method 2: Barycentric Interpolation
Getting Barycentric Weights
Now that we know which triangle each point falls in and the coordinates of that triangle's nodes, we just need the point's barycentric coordinates, which will be used as weights.
Code
# We need the triangle nodes and the point to interpolate as matrices
triangle_coord_test <- triangle_matches[1, ] %>% st_coordinates()
triangle_coord_test <- triangle_coord_test[c(1:3), c(1:2)]
simplex_test <- as.matrix(triangle_coord_test)

# The point to interpolate
point_test <- pts_assigned[1, c("lon", "lat")] %>%
  st_drop_geometry() %>%
  as.matrix()

# Get barycentric coordinates, labeled by node number
node_numbers <- triangle_matches[1, c("p1", "p2", "p3")] %>% st_drop_geometry()
as.data.frame(cart2bary(X = simplex_test, P = point_test)) %>%
  setNames(node_numbers)
     25073     25074     25368
 0.1209655 0.3431237 0.5359108
Assign Barycentric Weights
All the points can be processed in a row-wise operation this way. There is certainly a faster way to do this (a vectorized sketch using geometry::tsearch() follows the code below), but remember that we only need to do this once since the mesh doesn't really change.
Code
# Adapted from this post (which uses tsearch; the version here does not):
# https://www.r-bloggers.com/2013/03/barycentric-interpolation-fast-interpolation-on-arbitrary-grids/

# Function to get the weights for one row; fails if rows are not unique
barycentric_weights_from_row <- function(coordinates_df){

  # Make the simplex from the node coordinates
  nodes_simplex <- matrix(
    as.numeric(c(coordinates_df[c("x1", "x2", "x3", "y1", "y2", "y3")])),
    nrow = 3)
  node_indices <- coordinates_df[c("p1", "p2", "p3")]
  colnames(nodes_simplex) <- c("X", "Y")
  rownames(nodes_simplex) <- node_indices

  # Make a matrix from the point:
  point_mat <- as.matrix(coordinates_df[1, c("lon", "lat")])

  # Get the barycentric coords
  bcoords_x <- as.data.frame(cart2bary(nodes_simplex, point_mat)) %>%
    setNames(c("p1_bary", "p2_bary", "p3_bary"))

  # Return with the dataframe
  bind_cols(coordinates_df, bcoords_x)
}

# Actually get all the weights
overlaid_barycentric_weights <- pts_with_nearnodes %>%
  split(.$pt_id) %>%
  map_dfr(barycentric_weights_from_row)
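For reference, here is a minimal sketch of that vectorized alternative, following the tsearch() pattern from the linked r-bloggers post. It assumes the gom3_coords, gom3_mesh, and pts_df objects from the chunks above and has not been run against the actual mesh, so treat it as a starting point rather than the method used in this document:

```r
# Vectorized triangle search + barycentric weights in one call (sketch)
library(geometry)
library(sf)

# Triangle-vertex index matrix: one row per mesh element, columns p1/p2/p3
tri_idx <- as.matrix(st_drop_geometry(gom3_mesh)[, c("p1", "p2", "p3")])

# tsearch() locates the containing triangle for every point at once and,
# with bary = TRUE, also returns the barycentric coordinates
ts <- tsearch(
  x = gom3_coords$lon, y = gom3_coords$lat, t = tri_idx,
  xi = pts_df$lon, yi = pts_df$lat, bary = TRUE)

# ts$idx: row of tri_idx containing each point (NA if outside the mesh)
# ts$p:   n x 3 matrix of weights, ordered to match the p1/p2/p3 columns
head(ts$p)
```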
Step 3: Interpolate Values using Matrix Algebra
The following code takes all the information we've gathered above and performs the interpolation:
Code
#### Interpolation Code Below ####
# Define the interpolation as a sparse matrix operation for fast interpolation:
# https://www.r-bloggers.com/2013/03/barycentric-interpolation-fast-interpolation-on-arbitrary-grids/
# Faster than using apply, probably slower than a C implementation

# The original code for this:
# M <- sparseMatrix(
#   i = rep(1:nrow(Xi), each=3),
#   j = as.numeric(t(active)),
#   x = as.numeric(t(tri$p)),
#   dims = c(nrow(Xi), length(f)))
# Xi was the df for the new points
# active was the indices for the nodes
# tri$p were the barycentric coords
# f is the matrix/array containing the values

# Applied to our data:
# Temperature values at the surface and bottom sigma levels
stemp_vals <- ncvar_get(gom3_early, "temp")[, 1]
btemp_vals <- ncvar_get(gom3_early, "temp")[, 45]

# Build the sparse weight matrix
M <- sparseMatrix(
  i = rep(1:nrow(overlaid_barycentric_weights), each = 3),
  j = as.numeric(t(overlaid_barycentric_weights[, c("p1", "p2", "p3")])),
  x = as.numeric(t(overlaid_barycentric_weights[, c("p1_bary", "p2_bary", "p3_bary")])),
  dims = c(nrow(overlaid_barycentric_weights), length(stemp_vals)))

# Matrix multiply to interpolate
interp_sst <- as.numeric(M %*% stemp_vals)
interp_bt <- as.numeric(M %*% btemp_vals)
Plot Interpolation Results
The following plot shows the interpolated values for our simulated points, and the nodes used when interpolating values for those points.
The approach can be extended for any variable in the netcdf files, for example bottom temperature.
Code
# Combine with the points
interpolated_vals <- bind_cols(
  overlaid_barycentric_weights,
  data.frame("sst" = interp_sst),
  data.frame("bt" = interp_bt))

# Values at the relevant nodes only
all_nodes <- unique(unlist(overlaid_barycentric_weights[, c("p1", "p2", "p3")]))
node_sst <- data.frame(
  "lon" = gom3_coords[all_nodes, "lon"],
  "lat" = gom3_coords[all_nodes, "lat"],
  "sst" = stemp_vals[all_nodes],
  "bt"  = btemp_vals[all_nodes])

# Values everywhere:
gom3_early_temps <- bind_cols(
  gom3_coords,
  data.frame("sst" = stemp_vals),
  data.frame("bt" = btemp_vals))

# Map the interpolated points (circles) over the mesh nodes (triangles)
ggplot() +
  geom_sf(data = triangle_matches, fill = NA, color = "white") +
  geom_point(data = node_sst, aes(lon, lat, color = sst), size = 2, shape = 17) +
  geom_point(data = interpolated_vals, aes(lon, lat, fill = sst), size = 2, shape = 21, color = "black", show.legend = F) +
  scale_color_distiller(palette = "RdBu") +
  scale_fill_distiller(palette = "RdBu") +
  theme_dark() +
  labs(title = "Barycentric Interpolation",
       subtitle = "Sea Surface Temperature")
Offshore Area Regular Grid Test
One useful application of this approach is converting from the irregular mesh to a regularly spaced grid. This may be useful for unbiased polygon overlays or for direct comparison with a regularly spaced product.
Another application is converting from the denser meshes of newer FVCOM versions, like FVCOM-GOM5, to the sparser GOM3 mesh, which has longer temporal coverage.
The following shows what a regularly spaced resolution similar to GLORYS (1/12° x 1/12°) looks like.
Code
# Make a regular grid at ~1/12th degree
reg_grid <- expand.grid(
    lon = seq(-75.75, -56.75, 0.083),
    lat = seq(35.25, 46.25, 0.083)) %>%
  as.data.frame() %>%
  mutate(coord_pair = row_number(), .before = "lon") %>%
  st_as_sf(coords = c("lon", "lat"), crs = 4326, remove = FALSE)

# Use st_join to assign elem to the points we're looking up
pts_assigned <- st_join(
    st_transform(reg_grid, st_crs(gom3_mesh)),
    gom3_mesh,
    join = st_within) %>%
  drop_na(elem, p1, p2, p3)

# Add the relevant node coords to the table
pts_with_nearnodes <- pts_assigned %>%
  st_drop_geometry() %>%
  mutate(
    x1 = gom3_coords[p1, "lon"],
    x2 = gom3_coords[p2, "lon"],
    x3 = gom3_coords[p3, "lon"],
    y1 = gom3_coords[p1, "lat"],
    y2 = gom3_coords[p2, "lat"],
    y3 = gom3_coords[p3, "lat"])

# Get the barycentric weights
overlaid_barycentric_weights <- pts_with_nearnodes %>%
  split(.$coord_pair) %>%
  map_dfr(barycentric_weights_from_row)

# Do the interpolation:
# Temperature values
stemp_vals <- ncvar_get(gom3_early, "temp")[, 1]
btemp_vals <- ncvar_get(gom3_early, "temp")[, 45]

M <- sparseMatrix(
  i = rep(1:nrow(overlaid_barycentric_weights), each = 3),
  j = as.numeric(t(overlaid_barycentric_weights[, c("p1", "p2", "p3")])),
  x = as.numeric(t(overlaid_barycentric_weights[, c("p1_bary", "p2_bary", "p3_bary")])),
  dims = c(nrow(overlaid_barycentric_weights), length(stemp_vals)))

# Matrix multiply to interpolate
interp_sst <- as.numeric(M %*% stemp_vals)
interp_bt <- as.numeric(M %*% btemp_vals)

# Combine with the points
interpolated_vals <- bind_cols(
  overlaid_barycentric_weights,
  data.frame("sst" = interp_sst),
  data.frame("bt" = interp_bt))

# Values at the relevant nodes only
all_nodes <- unique(unlist(overlaid_barycentric_weights[, c("p1", "p2", "p3")]))
node_sst <- data.frame(
  "lon" = gom3_coords[all_nodes, "lon"],
  "lat" = gom3_coords[all_nodes, "lat"],
  "sst" = stemp_vals[all_nodes],
  "bt"  = btemp_vals[all_nodes])
The following plot shows the original FVCOM GOM3 irregular mesh data.
Code
# Plot the original mesh values
ggplot() +
  # geom_sf(data = gom3_mesh, fill = NA, color = "white", linewidth = 0.1) +
  geom_point(data = node_sst, aes(lon, lat, color = sst), size = 0.35) +
  scale_color_distiller(palette = "RdBu") +
  theme_dark() +
  labs(title = "Original Grid",
       subtitle = "Sea Surface Temperature")
And the figure below shows the interpolated values at the regularly spaced coordinates.
Code
# And the regular grid
ggplot() +
  geom_point(data = interpolated_vals, aes(lon, lat, color = sst), size = 0.35) +
  scale_color_distiller(palette = "RdBu") +
  theme_dark() +
  labs(title = "Barycentric Interpolation to Regular Grid",
       subtitle = "Sea Surface Temperature")
FVCOM Daily Product Matching
There are a number of projects where biological samples are collected but environmental conditions are not measured simultaneously. This adds the additional step of indexing to the correct point in time and pulling the correct node values associated with it.
For the years 2017-2023 we can use NECOFS forecast data, which uses the GOM4 mesh.
Get a lookup table of the date matches
Write an equivalent function to identify the proper time index
Use both lists to open connections and then subset the necessary values
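The chunk for this work is still empty; below is a minimal sketch of what the time-matching helper might look like. It assumes the files are opened with ncdf4 and carry a "time" variable in days since 1858-11-17 (the modified-Julian-day convention FVCOM output typically uses); the function name and the time units are assumptions to verify against the actual NECOFS files:

```r
library(ncdf4)

# Hypothetical helper: find the time index in an open FVCOM/NECOFS NetCDF
# whose timestep is closest to a sample date.
# Assumes a "time" variable in days since 1858-11-17 (MJD) -- verify first.
fvcom_time_index <- function(fvcom_nc, sample_date){
  mjd_origin <- as.Date("1858-11-17")
  nc_days    <- ncvar_get(fvcom_nc, varid = "time")
  nc_dates   <- mjd_origin + floor(nc_days)
  which.min(abs(as.numeric(nc_dates - as.Date(sample_date))))
}

# Usage idea: pair each sample with a file (by year/month) and a time index,
# then open each file once and pull node values at the matched index
# t_idx <- fvcom_time_index(gom3_early, pts_df$dtime[[1]])
```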