Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
[
{
"id": "sentinel1_changedetection",
"type": "openeo",
"description": "Sentinel 1 change detection example",
"backend": "openeo.dataspace.copernicus.eu",
"process_graph": {
"s1stats1": {
"process_id": "sentinel1_changedetection",
"namespace": "https://raw.githubusercontent.com/ESA-APEx/apex_algorithms/refs/heads/main/algorithm_catalog/gisat/sentinel1_changedetection/openeo_udp/sentinel1_changedetection.json",
"arguments": {
"spatial_extent": {
"east": 8.92,
"north": 44.45,
"south": 44.4,
"west": 8.82
},
"temporal_extent": [
"2023-05-01",
"2023-07-05"
]
},
"result": true
}
},
"reference_data": {
"job-results.json": "https://s3.waw3-1.cloudferro.com/apex-benchmarks/gh-11743427213!tests_test_benchmarks.py__test_run_benchmark_sentinel1_stats_!actual/job-results.json",
"openEO.tif": "https://s3.waw3-1.cloudferro.com/apex-benchmarks/gh-11743427213!tests_test_benchmarks.py__test_run_benchmark_sentinel1_stats_!actual/openEO.tif"
}
}
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,222 @@
{
"process_graph": {
"loadcollection1": {
"process_id": "load_collection",
"arguments": {
"bands": [
"VH",
"VV"
],
"id": "SENTINEL1_GRD",
"spatial_extent": {
"from_parameter": "spatial_extent"
},
"temporal_extent": {
"from_parameter": "temporal_extent"
}
}
},
"sarbackscatter1": {
"process_id": "sar_backscatter",
"arguments": {
"coefficient": "sigma0-ellipsoid",
"contributing_area": false,
"data": {
"from_node": "loadcollection1"
},
"elevation_model": null,
"ellipsoid_incidence_angle": false,
"local_incidence_angle": false,
"mask": false,
"noise_removal": true
}
},
"aggregatetemporalperiod1": {
"process_id": "aggregate_temporal_period",
"arguments": {
"data": {
"from_node": "sarbackscatter1"
},
"period": "dekad",
"reducer": {
"process_graph": {
"mean1": {
"process_id": "mean",
"arguments": {
"data": {
"from_parameter": "data"
}
},
"result": true
}
}
}
}
},
"applydimension1": {
"process_id": "apply_dimension",
"arguments": {
"data": {
"from_node": "aggregatetemporalperiod1"
},
"dimension": "t",
"process": {
"process_graph": {
"arrayinterpolatelinear1": {
"process_id": "array_interpolate_linear",
"arguments": {
"data": {
"from_parameter": "data"
}
},
"result": true
}
}
}
}
},
"apply1": {
"process_id": "apply",
"arguments": {
"data": {
"from_node": "applydimension1"
},
"process": {
"process_graph": {
"log1": {
"process_id": "log",
"arguments": {
"base": 10,
"x": {
"from_parameter": "x"
}
}
},
"multiply1": {
"process_id": "multiply",
"arguments": {
"x": 10,
"y": {
"from_node": "log1"
}
},
"result": true
}
}
}
}
},
"applydimension2": {
"process_id": "apply_dimension",
"arguments": {
"data": {
"from_node": "apply1"
},
"dimension": "t",
"process": {
"process_graph": {
"arraycreate1": {
"process_id": "array_create",
"arguments": {
"data": {
"from_parameter": "data"
}
},
"result": true
}
}
},
"target_dimension": "bands"
}
},
"renamelabels1": {
"process_id": "rename_labels",
"arguments": {
"data": {
"from_node": "applydimension2"
},
"dimension": "bands",
"target": [
"VV_t01",
"VV_t02",
"VV_t03",
"VV_t04",
"VV_t05",
"VV_t06",
"VV_t07",
"VV_t08",
"VV_t09",
"VV_t10",
"VH_t01",
"VH_t02",
"VH_t03",
"VH_t04",
"VH_t05",
"VH_t06",
"VH_t07",
"VH_t08",
"VH_t09",
"VH_t10"
]
}
},
"applydimension3": {
"process_id": "apply_dimension",
"arguments": {
"data": {
"from_node": "renamelabels1"
},
"dimension": "bands",
"process": {
"process_graph": {
"runudf1": {
"process_id": "run_udf",
"arguments": {
"data": {
"from_parameter": "data"
},
"runtime": "Python",
"udf": "\nimport xarray as xr\nimport numpy as np\nfrom scipy import stats\nfrom skimage.morphology import remove_small_holes\nfrom copy import copy\nfrom openeo.udf.debug import inspect\n\nDEBUG = False\n\n\ndef create_nan_mask(numpy_array, vv_vh_bandcount):\n # Create masks for both VV and VH parts of the array\n mask_vv = np.isnan(numpy_array[:vv_vh_bandcount]) | (numpy_array[:vv_vh_bandcount] < -99)\n mask_vh = np.isnan(numpy_array[vv_vh_bandcount:2 * vv_vh_bandcount]) | (\n numpy_array[vv_vh_bandcount:2 * vv_vh_bandcount] < -99)\n\n # Combine the masks (element-wise logical OR)\n combined_mask = mask_vv | mask_vh\n return combined_mask\n\n\ndef apply_threshold(stat_array, pol_item, DEC_array_threshold,\n stat_item_name=None, previous_stat_array_bool=None):\n stat_array_copy = copy(stat_array)\n\n if stat_item_name == 'std':\n if pol_item == \"VH\":\n pol_thr = 2.0\n else:\n pol_thr = 1.5\n stat_array = np.where(np.isnan(stat_array) | (stat_array < pol_thr), 0, 1)\n\n elif stat_item_name == 'mean_change':\n pol_thr = -1.75\n stat_array = np.where(np.isnan(stat_array) | (stat_array > pol_thr), 0, 1)\n\n elif stat_item_name == 'tstat':\n tstat_thr = 3.5\n stat_array = np.where(np.isnan(stat_array) | (stat_array < tstat_thr), 0, 1)\n if previous_stat_array_bool is not None:\n stat_array[previous_stat_array_bool == 0] = 0\n\n elif stat_item_name == 'pval':\n pvalue_thr = 0.1\n stat_array = np.where(np.isnan(stat_array) | (stat_array > pvalue_thr), 0, 1)\n if previous_stat_array_bool is not None:\n stat_array[previous_stat_array_bool == 0] = 0\n\n elif stat_item_name == 'ratio_slope':\n stat_thr = 0.025\n stat_array = np.where(np.isnan(stat_array) | (stat_array < stat_thr), 0, 1)\n\n elif stat_item_name == 'ratio_rsquared':\n stat_thr = 0.6\n stat_array = np.where(np.isnan(stat_array) | (stat_array < stat_thr), 0, 1)\n\n elif stat_item_name == 'ratio_mean_change':\n stat_thr = 2.0\n stat_array = np.where(np.isnan(stat_array) | (stat_array < stat_thr), 0, 1)\n\n elif 
stat_item_name == 'ratio_tstat':\n tstat_thr = 3.5\n stat_array = np.where(np.isnan(stat_array) | (stat_array > tstat_thr), 0, 1)\n if previous_stat_array_bool is not None:\n stat_array[previous_stat_array_bool == 0] = 0\n\n elif stat_item_name == 'ratio_pval':\n pvalue_thr = 0.1\n stat_array = np.where(np.isnan(stat_array) | (stat_array > pvalue_thr), 0, 1)\n if previous_stat_array_bool is not None:\n stat_array[previous_stat_array_bool == 0] = 0\n\n DEC_array_threshold += stat_array.astype(int)\n if DEBUG:\n return DEC_array_threshold, stat_array_copy, stat_array.astype(int)\n else:\n return DEC_array_threshold, None, None\n\n\ndef calculate_lsfit_r(vv_vh_r, vv_vh_bandcount):\n\n x = np.arange(vv_vh_bandcount)\n A = np.c_[x, np.ones_like(x)]\n\n y = np.reshape(vv_vh_r, (vv_vh_bandcount, -1))\n\n col_mean = np.nanmean(y, axis=0)\n inds = np.where(np.isnan(y))\n y[inds] = np.take(col_mean, inds[1])\n\n np.nan_to_num(y, copy=False, nan=0.0)\n\n m, resid, rank, s = np.linalg.lstsq(A, y, rcond=None)\n\n slope_r = np.reshape(m[0], vv_vh_r.shape[1:])\n r_squared = 1 - resid / (x.size * np.var(y, axis=0))\n r_squared = np.reshape(r_squared, vv_vh_r.shape[1:])\n\n return slope_r, r_squared\n\n\ndef apply_datacube(cube: xr.DataArray, context: dict) -> xr.DataArray:\n # Squeeze the 'bands' dimension only if its length is 1\n if 'bands' in cube.dims and cube.sizes['bands'] == 1:\n cube_squeezed = cube.squeeze(dim='bands')\n else:\n cube_squeezed = cube\n\n # Convert to NumPy array (now shape is (2, 100, 100))\n numpy_array = cube_squeezed.values.astype(float)\n del cube_squeezed\n if DEBUG:\n numpy_array = numpy_array[:, 0:128, 0:128]\n numpy_array[numpy_array < -99] = np.nan\n\n bands, dim1, dim2 = numpy_array.shape\n total_time_steps = bands // 2\n window_size = 10\n half_window = window_size // 2\n vv_vh_bandcount = total_time_steps\n\n DEC_array_threshold_list = []\n DEC_array_mask_list = []\n\n for i in range(total_time_steps - window_size + 1):\n 
master_combined_metrics = [] # Initialize master list to store metrics for each polarization\n\n DEC_array_threshold = np.zeros((dim1, dim2), dtype=int)\n\n # Process bands directly to conserve RAM\n for pol_index, pol_item in enumerate([\"VV\", \"VH\"]):\n\n # Loop twice for the two halves\n # Build index arrays for past and future\n if pol_item == \"VV\":\n # VV: past starts from i to i+5, future from i+5 to i+10\n pol_stack_past = list(np.arange(i, i + half_window))\n pol_stack_future = list(np.arange(i + half_window, i + window_size))\n else: # pol_item == \"VH\"\n # VH bands start after VV\n start_idx = vv_vh_bandcount # offset for VH bands\n pol_stack_past = list(np.arange(start_idx + i, start_idx + i + half_window))\n pol_stack_future = list(np.arange(start_idx + i + half_window, start_idx + i + window_size))\n\n # Combine past + future slices\n full_stack_indices = np.concatenate((pol_stack_past, pol_stack_future))\n if DEBUG:\n print(f\"time {i}: {pol_item} stack past: {pol_stack_past} \")\n print(f\"time {i}: {pol_item} stack future: {pol_stack_future} \")\n\n # Calculate the mean for Stack_p along the band axis (axis=0)\n Stack_p_MIN = np.nanmean(numpy_array[pol_stack_past], axis=0)\n\n # Calculate the mean for Stack_f along the band axis (axis=0)\n Stack_f_MIN = np.nanmean(numpy_array[pol_stack_future], axis=0)\n\n POL_std = np.nanstd(numpy_array[pol_stack_past + pol_stack_future], axis=0)\n DEC_array_threshold, POL_std, _ = apply_threshold(POL_std, pol_item, DEC_array_threshold,\n stat_item_name=\"std\")\n if not DEBUG: del POL_std\n\n ######## MOVING WINDOW TTEST ON STACK\n POL_mean_change = np.subtract(Stack_f_MIN, Stack_p_MIN)\n DEC_array_threshold, POL_mean_change, POL_mean_change_bool = apply_threshold(POL_mean_change, pol_item,\n DEC_array_threshold,\n stat_item_name=\"mean_change\")\n if not DEBUG: del POL_mean_change\n\n # Perform t-test across bands\n ttest_results = stats.ttest_ind(numpy_array[pol_stack_past],\n 
numpy_array[pol_stack_future],\n axis=0, nan_policy='omit')\n ttest_pvalue, ttest_tstatistic = ttest_results.pvalue, ttest_results.statistic\n DEC_array_threshold, ttest_pvalue, _ = apply_threshold(ttest_pvalue, pol_item, DEC_array_threshold,\n stat_item_name=\"pval\",\n previous_stat_array_bool=POL_mean_change_bool)\n DEC_array_threshold, ttest_tstatistic, _ = apply_threshold(ttest_tstatistic, pol_item, DEC_array_threshold,\n stat_item_name=\"tstat\",\n previous_stat_array_bool=POL_mean_change_bool)\n if not DEBUG: del ttest_results, ttest_pvalue, ttest_tstatistic\n del _, POL_mean_change_bool\n\n if DEBUG:\n combined_metrics = np.stack([\n Stack_p_MIN,\n Stack_f_MIN,\n POL_std,\n POL_mean_change,\n ttest_pvalue,\n ttest_tstatistic\n ], axis=0)\n\n # Append to master list for both VV and VH\n master_combined_metrics.append(combined_metrics)\n else:\n del pol_stack_past, pol_stack_future, Stack_p_MIN, Stack_f_MIN\n\n if DEBUG:\n # Convert master list to numpy array for final output\n master_combined_metrics = np.concatenate(master_combined_metrics, axis=0)\n\n ## VV - VH\n # vv_vh_r = np.subtract(numpy_array[list(np.arange(vv_vh_bandcount))] -\n # numpy_array[list(np.arange(vv_vh_bandcount) + vv_vh_bandcount)])\n vv_vh_r = numpy_array[i: i + window_size] - numpy_array[vv_vh_bandcount + i: vv_vh_bandcount + i + window_size]\n\n ratio_slope, ratio_r_squared = calculate_lsfit_r(vv_vh_r, window_size)\n DEC_array_threshold, ratio_slope, _ = apply_threshold(ratio_slope, pol_item, DEC_array_threshold,\n stat_item_name=\"ratio_slope\")\n DEC_array_threshold, ratio_r_squared, _ = apply_threshold(ratio_r_squared, pol_item, DEC_array_threshold,\n stat_item_name=\"ratio_rsquared\")\n\n if not DEBUG:\n del ratio_slope, ratio_r_squared\n\n # Calculate Mean Change Between Future and Past Stacks\n ratio_mean_change = (np.nanmean(vv_vh_r[list(np.arange(half_window) + half_window)], axis=0) -\n np.nanmean(vv_vh_r[list(np.arange(half_window))], axis=0)\n )\n DEC_array_threshold, 
ratio_mean_change, ratio_mean_change_bool = apply_threshold(ratio_mean_change, pol_item,\n DEC_array_threshold,\n stat_item_name=\"ratio_mean_change\")\n\n if not DEBUG:\n del ratio_mean_change\n\n # Perform T-Test Efficiently\n ratio_ttest_results = stats.ttest_ind(vv_vh_r[list(np.arange(half_window))],\n vv_vh_r[list(np.arange(half_window) + half_window)],\n axis=0, nan_policy='omit')\n # Extract p-value and t-statistic from the result\n DEC_array_threshold, ratio_ttest_pvalue, _ = apply_threshold(ratio_ttest_results.pvalue, pol_item,\n DEC_array_threshold,\n stat_item_name=\"ratio_pval\",\n previous_stat_array_bool=ratio_mean_change_bool)\n DEC_array_threshold, ratio_ttest_tstatistic, _ = apply_threshold(ratio_ttest_results.statistic, pol_item,\n DEC_array_threshold,\n stat_item_name=\"ratio_tstat\",\n previous_stat_array_bool=ratio_mean_change_bool)\n\n if not DEBUG:\n del ratio_ttest_results, ratio_mean_change_bool, ratio_ttest_pvalue, ratio_ttest_tstatistic\n\n if DEBUG:\n ratio_combined_metrics = np.stack([\n ratio_slope,\n ratio_r_squared,\n ratio_mean_change,\n ratio_ttest_pvalue,\n ratio_ttest_tstatistic\n ], axis=0)\n\n DEC_array_mask = np.zeros_like(DEC_array_threshold)\n DEC_array_mask[DEC_array_threshold > 3] = 1\n\n # Convert to boolean array (assuming the input is binary, 0 and 1)\n # Invert binary band\n inverted_band = np.logical_not(DEC_array_mask.astype(bool))\n processed_band = remove_small_holes(inverted_band, area_threshold=15)\n # Invert back to original\n DEC_array_mask = np.logical_not(processed_band).astype(np.uint8)\n\n if DEBUG:\n DEC_array_mask_list.append(DEC_array_mask)\n DEC_array_threshold_list.append(DEC_array_threshold)\n combined_data = np.concatenate([\n master_combined_metrics, # Shape (1, y, x) for predicted labels\n ratio_combined_metrics # Shape (n_classes, y, x) for probabilities\n ], axis=0)\n else:\n DEC_array_mask_list.append(DEC_array_mask)\n DEC_array_threshold_list.append(DEC_array_threshold)\n\n 
DEC_array_mask_threshold_array = np.stack(DEC_array_mask_list + DEC_array_threshold_list, axis=0)\n\n return xr.DataArray(\n DEC_array_mask_threshold_array,\n dims=[\"bands\", \"y\", \"x\"],\n coords={\n 'y': cube.coords['y'],\n 'x': cube.coords['x']\n }\n )\n\n"
},
"result": true
}
}
}
},
"result": true
}
},
"id": "sentinel1_changedetection",
"summary": "Sentinel-1 change detection from dekadal sigma0 backscatter (VV, VH).",
"description": "Loads Sentinel-1 GRD (VV, VH), computes sigma0-ellipsoid backscatter, aggregates to dekadal means, linearly interpolates gaps along time, converts to dB (10*log10), stacks the time steps into bands (VV_t01..VV_t10, VH_t01..VH_t10), and runs a statistical change-detection UDF (moving-window t-tests, VV/VH ratio regression and thresholding) that outputs change masks and threshold-count layers.",
"parameters": [
{
"name": "temporal_extent",
"description": "The time window to calculate the stats for.",
"schema": {
"type": "array",
"subtype": "temporal-interval"
},
"default": [
"2023-05-01",
"2023-07-30"
],
"optional": true
},
{
"name": "spatial_extent",
"description": "The spatial extent to calculate the stats for.",
"schema": {
"type": "object",
"subtype": "bounding-box"
},
"default": {
"west": 8.82,
"south": 44.4,
"east": 8.92,
"north": 44.45
},
"optional": true
}
]
}
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Loading