 from odc.geo.types import xy_
 from odc.algo import mask_cleanup
 from odc.geo.xr import xr_zeros
-from dea_tools.spatial import xr_rasterize
+from dea_tools.spatial import xr_rasterize, xr_interpolate
 from rasterio.features import sieve
 from intertidal.io import (
     load_aclum_mask,
@@ -135,8 +135,10 @@ def load_connectivity_mask(
     elevation_band="dem_h",
     resampling="bilinear",
     buffer=20000,
+    preprocess=None,
     max_threshold=100,
     add_mangroves=False,
+    correct_hat=False,
     mask_filters=[("dilation", 3)],
     **cost_distance_kwargs,
 ):
@@ -165,9 +167,23 @@ def load_connectivity_mask(
         The distance by which to buffer the input GeoBox to reduce edge
         effects. This buffer will eventually be removed and clipped back
         to the original GeoBox extent. Defaults to 20,000 metres.
+    preprocess : function, optional
+        An optional function (e.g. a lambda) that is applied to the
+        elevation data prior to the connectivity analysis. Regardless of
+        the outputs of this function, the resulting data will always be
+        clipped between 0 and inf so that it is suitable for analysis.
+        Defaults to None.
     max_threshold: int, optional
         Value used to threshold the resulting cost distance to produce
         a mask.
+    add_mangroves : bool, optional
+        Whether to use the extent of mangroves from Global Mangrove Watch
+        as additional starting points for the connectivity analysis.
+        Defaults to False.
+    correct_hat : bool, optional
+        Whether to apply a Highest Astronomical Tide correction, so that
+        costs are based on height above HAT rather than height above MSL.
+        Defaults to False.
     mask_filters : list of tuples, optional
         An optional list of morphological processing steps to pass to
         the `mask_cleanup` function. The default is `[("dilation", 3)]`,
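As a sketch of how the new `preprocess` hook might be used, any callable that accepts and returns the elevation DataArray should work, since the result is clipped to 0..inf afterwards. The transform below is purely illustrative and not part of this change:

# Hypothetical preprocess callable (illustration only): cap elevations at
# 50 m so that all high ground carries the same connectivity cost; the
# result is still clipped to 0..inf inside load_connectivity_mask
def example_preprocess(dem):
    return dem.clip(max=50)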
@@ -188,6 +204,7 @@ def load_connectivity_mask(
 
     # Buffer input geobox and reduce resolution to ensure that the
     # connectivity analysis is less affected by edge effects
+    print("Loading SRTM data at native 30 m resolution")
     geobox_buffered = GeoBox.from_bbox(
         geobox.buffered(xbuff=buffer, ybuff=buffer).boundingbox,
         resolution=30,
@@ -208,25 +225,44 @@ def load_connectivity_mask(
         try:
             gmw_da = load_gmw_mask(dem_da)
             starts_da = (dem_da == dem_da.nodata) | gmw_da
-        except:
+        except Exception as e:
+            print(f"No valid GMW mangroves found: {e}")
             starts_da = dem_da == dem_da.nodata
     else:
         starts_da = dem_da == dem_da.nodata
 
-    # Calculate cost surface (negative values are not allowed, so
-    # negative nodata values are resolved by clipping values to between
+    # Raise error if no valid starting points
+    if not starts_da.any():
+        raise Exception("No valid starting points found for tile, likely due to being located too far inland")
+
+    # Apply a Highest Astronomical Tide correction, to make
+    # costs based on height above HAT vs height above MSL
+    if correct_hat:
+        print("Applying HAT correction")
+        hat_correction = load_hat(dem_da)
+        dem_da = dem_da - hat_correction.data
+
+    # Calculate cost surface, optionally using custom preprocess
+    # function (negative values are not allowed, so negative
+    # nodata values are resolved by clipping values to between
     # 0 and infinity)
-    costs_da = dem_da.clip(0, np.inf)
+    if preprocess is not None:
+        print("Using custom pre-process function")
+        costs_da = preprocess(dem_da).clip(0, np.inf)
+    else:
+        costs_da = dem_da.clip(0, np.inf)
 
     # Run cost distance surface
+    print("Running cost distance calculation")
     costdist_da = xr_cost_distance(
         cost_da=costs_da,
         starts_da=starts_da,
         **cost_distance_kwargs,
-    )
+    )
 
     # Reproject back to original geobox extents and resolution
-    costdist_da = costdist_da.odc.reproject(how=geobox)
+    print("Reprojecting back to original GeoBox resolution")
+    costdist_da = costdist_da.odc.reproject(how=geobox, resampling="bilinear")
 
     # Apply threshold
     costdist_mask = costdist_da < max_threshold
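For context, a hedged usage sketch of the updated function follows; the leading `dc` and `geobox` arguments are assumptions based on the unchanged parts of the signature that this diff does not show:

# Assumed call pattern: `dc` is an open Datacube and `geobox` the target
# GeoBox expected by the function's earlier, unchanged parameters
costdist_mask, costdist_da = load_connectivity_mask(
    dc,
    geobox,
    preprocess=example_preprocess,  # optional elevation transform (see sketch above)
    add_mangroves=True,  # seed connectivity from GMW mangrove extents
    correct_hat=True,  # base costs on height above HAT rather than MSL
    max_threshold=100,
)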
@@ -238,16 +274,55 @@ def load_connectivity_mask(
     return costdist_mask, costdist_da
 
 
-def load_gmw_mask(ds, gmw_path="/gdata1/data/mangroves/gmw_v3_2007_vec_aus.geojson"):
+def load_gmw_mask(ds, gmw_path="https://dea-public-data-dev.s3-ap-southeast-2.amazonaws.com/mangroves_aux/maximum_extent_of_mangroves_Apr2019.fgb"):
     """
-    Experiment with loading GMW data.
+    Experiment with loading GMW data to use as additional
+    starting points in connectivity analysis.
+    By default, this code uses the unioned GMW extents
+    that form the analysis area of the DEA Mangrove product.
     """
     gmw_gdf = gpd.read_file(
-        gmw_path, bbox=ds.odc.geobox.boundingbox.to_crs("EPSG:4326")
+        gmw_path, bbox=ds.odc.geobox.boundingbox
     )
     gmw_da = xr_rasterize(gmw_gdf, ds)
     return gmw_da
 
+def load_hat(
+    ds,
+    hat_path="https://dea-public-data-dev.s3-ap-southeast-2.amazonaws.com/HAT_MLP_Regression/HAT_MLP_Regression.gpkg",
+    layer="HAT_MLP_Regression",
+    hat_col="HAT",
+    interp_method="idw",
+    interp_p=1,
+    interp_k=10,
+    interp_factor=100,
+):
+    """
+    Experiment with interpolating CSIRO HAT data to use
+    as a correction to elevation values.
+
+    Branson, Paul (2023): Coastal carbon - Australia's blue forest
+    future - Water Levels. v1. CSIRO. Data Collection.
+    https://doi.org/10.25919/6672-jx11
+    """
+    # Load CSIRO HAT data
+    hat = gpd.read_file(
+        hat_path,
+        layer=layer,
+        engine="pyogrio",
+    ).drop(["x", "y"], axis=1)
+
+    # Interpolate into extent of data
+    hat_correction = xr_interpolate(
+        ds=ds,
+        gdf=hat[[hat_col, "geometry"]],
+        method=interp_method,
+        p=interp_p,
+        k=interp_k,
+        factor=interp_factor,
+    ).HAT
+
+    return hat_correction
 
 def extents(
     dem, freq, corr, coastal_mask, urban_mask, min_correlation=0.15, sieve_size=5
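Finally, a minimal sketch of how the new `load_hat` helper feeds the `correct_hat` branch above, assuming `dem_da` is the buffered SRTM elevation DataArray loaded earlier in `load_connectivity_mask`:

# Interpolate the CSIRO HAT point data onto the elevation grid, then
# convert MSL-relative elevations to approximate height above HAT,
# mirroring the correct_hat branch of load_connectivity_mask
hat_correction = load_hat(dem_da, interp_method="idw", interp_k=10)
dem_above_hat = dem_da - hat_correction.data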