diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 00000000..f83c4731
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,224 @@
+{
+ // Use IntelliSense to learn about possible attributes.
+ // Hover to view descriptions of existing attributes.
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "name": "Python Debugger: MAG TT Standalone PA-land-clip.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/../map-auto-generation/workflow/mesh_tools/touchterrain/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../map-auto-generation/",
+ "args": ["../templates//il-israel-linear//workspace//ttConfig/PA-land.json", "../templates/output/il-israel-linear/250/PA/"]
+ },
+ {
+ "name": "Python Debugger: MAG TT Standalone PA3-land-clip.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/../map-auto-generation/workflow/mesh_tools/touchterrain/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../map-auto-generation/",
+ "args": ["../templates//il-israel-linear//workspace//ttConfig/PA3-land.json", "../templates/output/il-israel-linear/250/PA/"]
+ },
+ {
+ "name": "Python Debugger: MAG TT Standalone PA3-water-clip.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/../map-auto-generation/workflow/mesh_tools/touchterrain/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../map-auto-generation/",
+ "args": ["../templates//il-israel-linear//workspace//ttConfig/PA3-water.json", "../templates/output/il-israel-linear/250/PA/"]
+ },
+ {
+ "name": "Python Debugger: TT Standalone PA-river.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "PA-river.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone IP-land.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "IP-land.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone IP-water.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "IP-water.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone IP-land-clip.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "IP-land-clip.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone IP-water-clip.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "IP-water-clip.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone DC-new-land-only.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "DC-new-land-only.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone DC-new-land.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "DC-new-land.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone DC-new-river.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "DC-new-river.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone DC-new-land-clip.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "DC-new-land-clip.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone DC-new-river-clip.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "DC-new-river-clip.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone DC-new-land-thru.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "DC-new-land-thru.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone DC-new-river-thru.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "DC-new-river-thru.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone WV-new-land-only.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "WV-new-land-only.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone WV-new-land.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "WV-new-land.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone WV-new-river.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "WV-new-river.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone WV-new-land-thru.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "WV-new-land-thru.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone WV-new-river-thru.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "WV-new-river-thru.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone simplePolygon-new-land.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "simplePolygon-new-land.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone simplePolygon-new-river.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "simplePolygon-new-river.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone simplePolygon-new-land-clip.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "simplePolygon-new-land-clip.json"
+ },
+ {
+ "name": "Python Debugger: TT Standalone simplePolygon-new-river-clip.json",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${workspaceFolder}/TouchTerrain_standalone.py",
+ "console": "integratedTerminal",
+ "cwd": "${workspaceFolder}/../touchterrain-dev/",
+ "args": "simplePolygon-new-river-clip.json"
+ },
+ ]
+}
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000..e005eaa9
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,14 @@
+{
+ "python-envs.defaultEnvManager": "ms-python.python:conda",
+ "python-envs.defaultPackageManager": "ms-python.python:conda",
+ "python-envs.pythonProjects": [],
+ "python.testing.unittestArgs": [
+ "-v",
+ "-s",
+ "./test",
+ "-p",
+ "test_*.py"
+ ],
+ "python.testing.pytestEnabled": false,
+ "python.testing.unittestEnabled": true
+}
\ No newline at end of file
diff --git a/TouchTerrain_standalone.py b/TouchTerrain_standalone.py
index c2361b01..9f7787fe 100644
--- a/TouchTerrain_standalone.py
+++ b/TouchTerrain_standalone.py
@@ -28,17 +28,21 @@
'''
import time
import json
-import sys, os
+import os
+import sys
+
from os.path import abspath, dirname
+from typing import Any
try:
from touchterrain.common import TouchTerrainEarthEngine as TouchTerrain
from touchterrain.common.TouchTerrainGPX import *
-except:
- print("Error: touchterrain module is not installed. Use pip install . in the same folder as setup.py")
+ from touchterrain.common.user_config import TouchTerrainConfig
+except Exception as e:
+ print(e)
+ print("Error: touchterrain module is not installed or error happened when importing. Use pip install . in the same folder as pyproject.toml to install touchterrain.")
sys.exit()
-
#
# How to run the standalone version:
#
@@ -56,59 +60,20 @@
# main function, will be called at the end of the script
def main():
- # Default parameters:
- # The JSON file overwrites values for the following keys, which are used as
- # args for get_zipped_tiles()
- args = {
- "DEM_name": 'USGS/3DEP/10m',# DEM_name: name of DEM source used in Google Earth Engine
- # for all valid sources, see DEM_sources in TouchTerrainEarthEngine.py
- "trlat": 44.69741706507476, # lat/lon of top right corner
- "trlon": -107.97962089843747,
- "bllat": 44.50185267072875, # lat/lon of bottom left corner
- "bllon": -108.25427910156247,
- "clean_diags": False, # clean 2x2 diagonals
- "poly_file": None, # path to a local kml file
- "polyURL": None, # URL to a publicly readable(!) kml file on Google Drive
- "importedDEM": None, # if not None, the raster file to use as DEM instead of using GEE (null in JSON)
- "printres": 0.5, # resolution (horizontal) of 3D printer (= size of one pixel) in mm
- "ntilesx": 1, # number of tiles in x and y
- "ntilesy": 1,
- "tilewidth": 80, # width of each tile in mm (<- !!!!!), tile height is calculated
- "basethick": 1, # thickness (in mm) of printed base
- "zscale": 1.0, # elevation (vertical) scaling
- "fileformat": "STLb", # format of 3D model files: "obj" wavefront obj (ascii),"STLa" ascii STL or "STLb" binary STL
- "tile_centered": False, # True-> all tiles are centered around 0/0, False, all tiles "fit together"
- "zip_file_name": "terrain", # base name of zipfile, .zip will be added
- "CPU_cores_to_use" : 0, # 0 means all cores, None (null in JSON!) => don't use multiprocessing
- "max_cells_for_memory_only" : 1000 * 1000, # if raster is bigger, use temp_files instead of memory
-
- # these are the args that could be given "manually" via the web UI
- "no_bottom": False, # omit bottom triangles?
- #"rot_degs": 0, # rotate by degrees ccw # CH disabled for now
- "bottom_image": None, # 1 band greyscale image used for bottom relief
- "ignore_leq": None, # set values <= this to NaN, so they are ignored
- "lower_leq": None, # e.g. [0.0, 2.0] values <= 0.0 will be lowered by 2mm in the final model
- "unprojected": False, # don't project to UTM, only usefull when using GEE for DEM rasters
- "only": None,# list of tile index [x,y] with is the only tile to be processed. None means process all tiles (index is 1 based)
- "importedGPX": None, # Plot GPX paths from files onto the model.
- "gpxPathHeight": 100, # Currently we plot the GPX path by simply adjusting the raster elevation at the specified lat/lon,
- # therefore this is in meters. Negative numbers are ok and put a dent in the model
- "gpxPixelsBetweenPoints" : 20, # GPX Files haves a lot of points. A higher number will create more space between lines drawn
- # on the model and can have the effect of making the paths look a bit cleaner
- "gpxPathThickness" : 5, # Stack parallel lines on either side of primary line to create thickness.
- "smooth_borders": True, # smooth borders
- "offset_masks_lower": None, # e.g. [[filename, offset], [filename2, offset2],...] Masked regions (pixel values > 0) in the file will be lowered by offset(mm) * pixel value in the final model.
- "fill_holes": None, # e.g. [10, 7] Specify number of interations to find a neighbor threshold to fill holes. -1 iterations will continue iterations until no more holes are found. Defaults to 7 neighbors in a 3x3 footprint with elevation > 0 to fill a hole with the average of the footprint.
- "min_elev" : None, # None means: will be calculated from actual elevation later. min_elev defines the elevation that will be at base_thickness
+ default_args = TouchTerrainConfig()
+
+ # Fill args dict with the bare minimum of default values
+ args: dict[str, Any] = {
+ "importedDEM": default_args.importedDEM,
+ "importedDEM_interp": default_args.importedDEM_interp,
+ "offset_masks_lower": default_args.offset_masks_lower
}
# write an example json file, in case it gets deleted ...
with open('example_config.json', 'w+') as fp:
- json.dump(args, fp, indent=0, sort_keys=True) # indent = 0: newline after each comma
+ json.dump(default_args, fp, indent=2, default=vars)
print('Wrote example_config.json with default values, you can use it as a template but make sure to rename it!')
-
-
# parse args
if len(sys.argv) > 1: # sys.argv are the CLI args
json_fname = sys.argv[1]
@@ -125,62 +90,22 @@ def main():
print("reading", json_fname)
- for k in list(args.keys()):
- try:
- args[k] = json_args[k] # try to find a value for k in json config file
- #print(k, args[k])
- except:
- print("info:", k, "has missing or invalid value, using defaults where possible") # no match? no problem, just keep the default value
- #print("%s = %s" % (k, str(args[k])))
- else:
- # no JSON config file given, setting config values in code
- # you can comment out lines for which you don't want to overwrite the default settings
- overwrite_args = {
- "DEM_name": 'USGS/3DEP/10m',# DEM_name: name of DEM source used in Google Earth Engine
- # for all valid sources, see DEM_sources in TouchTerrainEarthEngine.py
- "trlat": 44.69741706507476, # lat/lon of top right corner
- "trlon": -107.97962089843747,
- "bllat": 44.50185267072875, # lat/lon of bottom left corner
- "bllon": -108.25427910156247,
- "clean_diags": False, # clean 2x2 diagonals
- "poly_file": None, # path to a local kml file
- "polyURL": None, # URL to a publicly readable(!) kml file on Google Drive
- "importedDEM": None, # if not None, the raster file to use as DEM instead of using GEE (null in JSON)
- "printres": 0.5, # resolution (horizontal) of 3D printer (= size of one pixel) in mm
- "ntilesx": 1, # number of tiles in x and y
- "ntilesy": 1,
- "tilewidth": 80, # width of each tile in mm (<- !!!!!), tile height is calculated
- "basethick": 1, # thickness (in mm) of printed base
- "zscale": 1.0, # elevation (vertical) scaling
- "fileformat": "STLb", # format of 3D model files: "obj" wavefront obj (ascii),"STLa" ascii STL or "STLb" binary STL
- "tile_centered": False, # True-> all tiles are centered around 0/0, False, all tiles "fit together"
- "zip_file_name": "terrain", # base name of zipfile, .zip will be added
- "CPU_cores_to_use" : 0, # 0 means all cores, None (null in JSON!) => don't use multiprocessing
- "max_cells_for_memory_only" : 1000 * 1000, # if raster is bigger, use temp_files instead of memory
-
- # these are the args that could be given "manually" via the web UI
- "no_bottom": False, # omit bottom triangles?
- #"rot_degs": 0, # rotate by degrees ccw # CH disabled for now
- "bottom_image": None, # 1 band greyscale image used for bottom relief
- "ignore_leq": None, # set values <= this to NaN, so they are ignored
- "lower_leq": None, # e.g. [0.0, 2.0] values <= 0.0 will be lowered by 2mm in the final model
- "unprojected": False, # don't project to UTM, only usefull when using GEE for DEM rasters
- "only": None,# list of tile index [x,y] with is the only tile to be processed. None means process all tiles (index is 1 based)
- "importedGPX": None, # Plot GPX paths from files onto the model.
- "gpxPathHeight": 100, # Currently we plot the GPX path by simply adjusting the raster elevation at the specified lat/lon,
- # therefore this is in meters. Negative numbers are ok and put a dent in the model
- "gpxPixelsBetweenPoints" : 20, # GPX Files haves a lot of points. A higher number will create more space between lines drawn
- # on the model and can have the effect of making the paths look a bit cleaner
- "gpxPathThickness" : 5, # Stack parallel lines on either side of primary line to create thickness.
- "smooth_borders": True, # smooth borders
- "offset_masks_lower": None, # e.g. [[filename, offset], [filename2, offset2],...] Masked regions (pixel values > 0) in the file will be lowered by offset(mm) * pixel value in the final model.
- "fill_holes": None, # e.g. [10, 7] Specify number of interations to find a neighbor threshold to fill holes. -1 iterations will continue iterations until no more holes are found. Defaults to 7 neighbors in a 3x3 footprint with elevation > 0 to fill a hole with the average of the footprint.
- "min_elev" : None, # None means: will be calculated from actual elevation later. min_elev defines the elevation that will be at base_thickness
- }
-
- # overwrite settings in args
- for k in overwrite_args:
- args[k] = overwrite_args[k]
+ args["config_path"] = json_fname
+
+ for k in list(json_args.keys()):
+ args[k] = json_args[k] # try to find a user set value for k in json config file
+
+ extract_dir = None
+ if len(sys.argv) > 2:
+ extract_dir = sys.argv[2]
+
+ # else:
+ # # no JSON config file given, setting config values in code
+ # # you can comment out lines for which you don't want to overwrite the default settings
+
+ # # overwrite settings in args
+ # for k in default_args:
+ # args[k] = default_args[k]
# print out current args
@@ -191,6 +116,10 @@ def main():
# for local DEM, get the full path to it
if not args["importedDEM"] == None:
args["importedDEM"] = abspath(args["importedDEM"])
+
+ # for local DEM, get the full path to it
+ if not args["importedDEM_interp"] == None:
+ args["importedDEM_interp"] = abspath(args["importedDEM_interp"])
# get full path to offset mask TIFF
if not args["offset_masks_lower"] == None and len(args["offset_masks_lower"]) > 0:
@@ -198,14 +127,14 @@ def main():
offset_mask_pair[0] = abspath(offset_mask_pair[0])
# Give all config values to get_zipped_tiles for processing:
- totalsize, full_zip_file_name = TouchTerrain.get_zipped_tiles(**args) # all args are in a dict
+ totalsize, full_zip_file_name = TouchTerrain.get_zipped_tiles(args) # all args are in a dict
print("\nCreated zip file", full_zip_file_name, "%.2f" % totalsize, "Mb")
- # Optional: unzip the zip file into the current folder
- if 0: # set this to 0 if you don't want the zip file to be unzipped
+ # Optional: unzip the zip file into the current folder or folder specified as the second CLI param
+ if 1: # set this to 0 if you don't want the zip file to be unzipped
#import os.path
#folder, file = os.path.splitext(full_zip_file_name) # tmp folder
- folder = os.getcwd() + os.sep + args["zip_file_name"]# new stl folder in current folder
+ folder = extract_dir if extract_dir else (os.getcwd() + os.sep + os.path.splitext(os.path.basename(full_zip_file_name))[0]) # new stl directly in extract_dir or new stl in zipfile name folder in current folder
# unzip the zipfile into the folder it's already in
import zipfile
@@ -214,24 +143,25 @@ def main():
zip_ref.close()
print("unzipped file inside", full_zip_file_name, "into", folder)
- # Optional: show the STL files in a browser
- import k3d
- # get all stl files in that folder
- from glob import glob
- mesh_files = glob(folder + os.sep + "*.stl")
- print "in folder", folder, "using", mesh_files
-
- plot = k3d.plot()
-
- for m in mesh_files:
- print m
- buf = open(m, 'rb').read()
- #buf = str(stl_model)
- #buf = buf.encode('utf_32')
- print buf[:100]
- plot += k3d.stl(buf)
-
- plot.display()
+ # Optional: show the STL files in a browser
+ if 0: # set this to 0 if you don't want to show STL files in browser
+ import k3d
+ # get all stl files in that folder
+ from glob import glob
+ mesh_files = glob(folder + os.sep + "*.stl")
+ print("in folder", folder, "using", mesh_files)
+
+ plot = k3d.plot()
+
+ for m in mesh_files:
+ print(m)
+ buf = open(m, 'rb').read()
+ #buf = str(stl_model)
+ #buf = buf.encode('utf_32')
+ print(buf[:100])
+ plot += k3d.stl(buf)
+
+ plot.display()
diff --git a/environment-dev.yml b/environment-dev.yml
new file mode 100644
index 00000000..7b90c788
--- /dev/null
+++ b/environment-dev.yml
@@ -0,0 +1,8 @@
+name: touchterrain-dev
+channels:
+ - conda-forge
+dependencies:
+ - gdal
+ - pip
+ - pip:
+ - -e .
\ No newline at end of file
diff --git a/environment.yml b/environment.yml
index 1b107434..ce381d7f 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,45 +1,8 @@
-name: notebook
+name: touchterrain
channels:
- conda-forge
-dependencies: # for geemap in a notebook, avoiding the model not found error
- - cartopy
- - datapane
- - flask>=2.0.0
- - flask-caching
+dependencies:
- gdal
- - geemap>=0.11.0
- - geopandas
- - imageio
- - ipyvtklink
- - keplergl
- - laspy
- - leafmap>=0.7.2
- - localtileserver>=0.4.0
- - osmnx
- pip
- - pydeck
- - pyntcloud
- - python>=3.8
- - pyvista
- - requests
- - rio-cogeo
- - tifffile
- - xarray_leaflet
-
- # for running touchterrain in a notebook
- - pillow>=6.0.0
- - google-api-python-client==1.12.8
- - earthengine-api>=0.1.232
- - oauth2client>=4.1.3
- - numpy>=1.17
- - scipy>=1.2
- - kml2geojson>=4.0.2
- - geojson>=2.5
- - defusedxml>=0.6
- - six>=1.15.0
- - k3d>=2.14
-
- # touchterrain will be installed via postBuild!
-
-
-
+ - pip:
+ - .
\ No newline at end of file
diff --git a/old/environment.yml b/old/environment.yml
new file mode 100644
index 00000000..7a41f931
--- /dev/null
+++ b/old/environment.yml
@@ -0,0 +1,46 @@
+name: notebook
+channels:
+ - conda-forge
+dependencies: # for geemap in a notebook, avoiding the model not found error
+ - cartopy
+ - datapane
+ - flask>=2.0.0
+ - flask-caching
+ - gdal
+ - geemap>=0.11.0
+ - geopandas
+ - imageio
+ - ipyvtklink
+ - keplergl
+ - laspy
+ - leafmap>=0.7.2
+ - localtileserver>=0.4.0
+ - osmnx
+ - pip
+ - pydeck
+ - pyntcloud
+ - python>=3.8
+ - pyvista
+ - requests
+ - rio-cogeo
+ - tifffile
+ - xarray_leaflet
+ - shapely>=2.1.0
+
+ # for running touchterrain in a notebook
+ - pillow>=6.0.0
+ - google-api-python-client==1.12.8
+ - earthengine-api>=0.1.232
+ - oauth2client>=4.1.3
+ - numpy>=1.17
+ - scipy>=1.2
+ - kml2geojson>=4.0.2
+ - geojson>=2.5
+ - defusedxml>=0.6
+ - six>=1.15.0
+ - k3d>=2.14
+
+ # touchterrain will be installed via postBuild!
+
+
+
diff --git a/setup.py b/old/setup.py
similarity index 95%
rename from setup.py
rename to old/setup.py
index 40ad69d9..4797e2ea 100644
--- a/setup.py
+++ b/old/setup.py
@@ -5,41 +5,41 @@
version='3.7.0', # Feb. 17, 2025
description='Framework for converting raster based Digital Elevation Models (online or local) into 3D printable terrain models (STL)',
url='https://github.com/ChHarding/TouchTerrain_for_CAGEO',
- license='MIT',
+ license='GPLv3',
classifiers=[
'Programming Language :: Python :: 3',
],
keywords='elevation terrain 3D-printing geotiff STL',
- python_requires='>=3.8, <4',
+ python_requires='>=3.11, <4',
author="Chris Harding",
author_email="charding@iastate.edu",
packages=find_namespace_packages(include=["touchterrain.*"]), # should only be server and common
include_package_data=True,
install_requires=[
- 'Pillow>=6.0.0',
- 'google-api-python-client>=2.6',
+ 'defusedxml>=0.6', # safe minidom for parsing kml
'earthengine-api>=0.1.232',
- 'oauth2client>=4.1.3',
+ 'geojson>=2.5', # for wrapping polygon data
+ 'google-api-python-client>=2.6',
+ "httplib2>=0.22.0",
+ "imageio>=2.36.0",
+ "k3d>=2.16.1",
+ 'kml2geojson>=4.0.2', # for reading polygon coords from kml
+ "matplotlib>=3.9.2",
'numpy>=1.17',
+ 'oauth2client>=4.1.3',
+ 'Pillow>=6.0.0',
'scipy>=1.2', # Only needed for hole filling functionality
- 'kml2geojson>=4.0.2', # for reading polygon coords from kml
- 'geojson>=2.5', # for wrapping polygon data
- 'defusedxml>=0.6', # safe minidom for parsing kml
'six>=1.15.0', # earthengine apparently uses an old version of six ...
#'GDAL>3.4.3', # Installation via pip requires a C++ compiler: https://visualstudio.microsoft.com/visual-cpp-build-tools
# with conda: conda install -c conda-forge gdal
# Prebuilds (.whl) : https://www.lfd.uci.edu/~gohlke/pythonlibs/#gdal but stopped with 3.4.3
# Still active: https://www.nuget.org/packages?q=GDAL but uses a .Net package manager
- "imageio>=2.36.0",
- "k3d>=2.16.1",
- "httplib2>=0.22.0",
- "matplotlib>=3.9.2",
],
extras_require={
'server': [ # Not sure which of the above could also be server-only
+ 'Flask>=2.0.0',
'gunicorn>=20.0.4',
- 'Flask>=1.0.2',
],
},
-)
\ No newline at end of file
+)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..2869da43
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,79 @@
+[build-system]
+requires = ["setuptools>=61"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "touchterrain"
+version = "3.8.0"
+description = "Framework for converting raster based Digital Elevation Models (online or local) into 3D printable terrain models (STL)"
+readme = "ReadMe.md"
+requires-python = ">=3.11,<4"
+license = { text = "GPLv3" }
+authors = [
+ { name = "Chris Harding", email = "charding@iastate.edu" },
+]
+keywords = ["elevation", "terrain", "3D-printing", "geotiff", "STL"]
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "Topic :: Multimedia :: Graphics :: 3D Modeling",
+ "Programming Language :: Python :: 3",
+]
+dependencies = [
+ "defusedxml>=0.7.1",
+ "earthengine-api>=1.2.0",
+ "geojson>=2.5",
+ "geopandas",
+# "gdal>=3.2.1,<3.10", # Commented out because pip may have issues compiling using VS build tools. It is better to install gdal via "conda install -c conda-forge gdal"
+ "google-api-python-client>=2.151.0",
+ "httplib2>=0.22.0",
+ "imageio>=2.36.0",
+ "k3d>=2.16.1",
+ "kml2geojson>=5.1.0",
+ "matplotlib>=3.9.2",
+ "numpy>=2.1.2",
+ "oauth2client>=4.1.3",
+ "pillow>=10.4.0",
+ "pytest",
+ "requests>=2.0.0",
+ "scipy>=1.14.1",
+ "shapely>=2.1.0",
+ "six>=1.16.0",
+]
+
+[project.optional-dependencies]
+server = [
+ "Flask>=3.0.3",
+ "gunicorn>=20.0.4",
+]
+notebook = [
+ "cartopy",
+ "datapane",
+ "flask-caching",
+ "geemap>=0.11.0",
+ "ipykernel",
+ "ipyvtklink",
+ "ipywidgets",
+ "jupyterlab",
+ "keplergl",
+ "laspy",
+ "leafmap>=0.7.2",
+ "localtileserver>=0.4.0",
+ "notebook",
+ "osmnx",
+ "pydeck",
+ "pyntcloud",
+ "pyvista",
+ "rio-cogeo",
+ "tifffile",
+ "xarray_leaflet",
+]
+
+[project.urls]
+Homepage = "https://github.com/ChHarding/TouchTerrain_for_CAGEO"
+
+[tool.setuptools]
+include-package-data = true
+
+[tool.setuptools.packages.find]
+include = ["touchterrain", "touchterrain.*"]
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index a28fa410..00000000
--- a/requirements.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-Pillow>=10.4.0
-google-api-python-client>=2.151.0
-earthengine-api>=1.2.0
-Flask>=3.0.3
-oauth2client>=4.1.3
-numpy>=2.1.2
-scipy>=1.14.1
-kml2geojson>=5.1.0
-geojson>=2.5
-defusedxml>=0.7.1
-six>=1.16.0
-imageio>=2.36.0
-k3d>=2.16.1
-httplib2>=0.22.0
-matplotlib>=3.9.2
-
-
-# GDAL is tricky to install, at least on Windows, as it needs to be compiled.
-# You can get a pre-complied version of GDAL here:
-# https://github.com/cgohlke/win_arm64-wheels
-# https://github.com/cgohlke/geospatial-wheels/releases
-# Go to the current Release - Assets (may need to list ALL assests) and grab the one
-# for your python version and windows system e.g. GDAL-3.4.3-cp310-cp310-win_amd64.whl
-# Then install it with pip install GDAL-3.4.3-cp310-cp310-win_amd64.whl
-gdal>=3.2.1,<=3.9.3
-gunicorn>=20.0.4
diff --git a/setup.md b/setup.md
new file mode 100644
index 00000000..a82c3e53
--- /dev/null
+++ b/setup.md
@@ -0,0 +1,135 @@
+# Setup a workspace to run Touch Terrain
+
+## Code and Environment creation
+
+1. Get the Touch Terrain code
+
+ ```sh
+ git clone XXX
+ cd ./TOUCHTERRAIN_FOR_CAGEO
+ ```
+
+1. Create or switch to the Python environment
+
+ - Create a Python environment `touchterrain` and install deps for the current folder.
+
+OR
+
+ - Create the blank Python environment
+
+ ```sh
+ # create new conda env called `touchterrain-dev`
+ conda create --name touchterrain-dev
+ # activate the virtual environment called touchterrain-dev
+ conda activate touchterrain-dev
+ ```
+
+## Update Environment with Dependencies
+
+The environment dependencies can be installed through conda or pip.
+
+`.` is the python package configured with `pyproject.toml` in current folder. The current folder should be the root of the Touch Terrain code folder.
+
+- Touch Terrain dependencies are specified in `pyproject.toml`
+
+- If you want to run Touch Terrain as a server or in a Jupyter notebook, add `server` or `notebook` to the pip dependency path like `.` -> `.[notebook]`.
+
+### Conda-based
+
+Update conda env with Touch Terrain deps from an `environment.yml` variant. `conda` will install `pip` and automatically use `pip` to install `touchterrain` package and its dependencies.
+
+Dependency structure
+
+- `conda env ---dependency--> gdal`
+- `conda env ---dependency--> pip ---dependency--> . [touchterrain] ---dependency--> other pkgs`
+
+#### Touch Terrain (normal)
+
+Install touchterrain as a normal package in pip. Use this if you do not plan to make changes to the Touch Terrain code.
+
+```sh
+conda env update --file environment.yml --name touchterrain-dev --prune
+```
+
+`touchterrain` package (aka the code) will likely be installed (aka copied) to the conda env site-packages in `C:\Users\XXX\.conda\envs\ENV_NAME\Lib`
+
+#### Touch Terrain (development)
+
+Install touchterrain as an **editable** package in pip. The package is linked to the current directory. Changes in source code are reflected in the "installed" package without needing to reinstall.
+
+```sh
+conda env update --file environment-dev.yml --name touchterrain-dev --prune
+```
+
+#### Install `gdal` via conda from conda-forge source
+
+```sh
+conda install -c conda-forge gdal
+```
+
+> `--prune` will remove ALL packages not specified in the current target yml file and removed packages include gdal if gdal was previously installed via conda. If you want to update environment from the yml file, you can either drop `--prune` to not remove conda gdal or reinstall gdal afterwards each time.
+
+### pip-based
+
+You can also install Touch Terrain and its deps directly via `pip` without calling `pip` through `conda`.
+
+Dependency structure
+
+- `conda env ---dependency--> gdal`
+- `pip ---dependency--> . [touchterrain] ---dependency--> other pkgs`
+
+#### Install `gdal` via conda from conda-forge source
+
+```sh
+conda install -c conda-forge gdal
+```
+
+#### Normal
+
+```sh
+pip install .
+```
+
+#### Development
+
+Install touchterrain as a module in "editable" state so it links back to the local development code folder
+
+```sh
+pip install -e .
+```
+
+## Environment Verification
+
+Conda shows packages from both conda and pip and where they are from (pypi (pip) or conda-forge (conda))
+
+```sh
+conda list
+```
+
+```sh
+pip list -v
+```
+
+should list a line for touchterrain if installed as `editable` like
+
+```
+touchterrain 3.8.0 C:\Users\XXX\development\TouchTerrain_for_CAGEO
+```
+
+
+## Environment Deletion
+
+```sh
+conda env list
+conda deactivate
+conda env remove --name touchterrain-dev
+```
+
+```sh
+conda env list
+conda deactivate
+conda env remove --name tt
+conda create --name tt
+conda activate tt
+conda env update --file environment.yml --name tt --prune
+```
diff --git a/setup.sh b/setup.sh
new file mode 100644
index 00000000..24cbd771
--- /dev/null
+++ b/setup.sh
@@ -0,0 +1,42 @@
+#sitepackages in C:\Users\XXX\.conda\envs\touchterrain-dev\Lib
+
+git clone XXX
+cd ./TOUCHTERRAIN_FOR_CAGEO
+# create new env
+conda create --name touchterrain-dev
+# With a new virtual environment called touchterrain-dev
+conda activate touchterrain-dev
+
+## Option A:
+
+# Update conda env with touch terrain requirements from environment.yml
+conda env update --name touchterrain-dev --file environment.yml --prune #old env
+conda env update --name touchterrain-dev --file environment-base.yml --prune # Install touchterrain as a normal package in pip (conda > dep > pip > dep > . [current touchterrain directory]).
+conda env update --name touchterrain-dev --file environment-dev.yml --prune # Install touchterrain as an editable package linked to the current touchterrain code development directory so local changes are used in the package.
+
+# Then install gdal via conda from conda-forge.
+conda install -c conda-forge gdal
+
+# --prune will remove ALL packages not specified in the current target yml file and removed packages include gdal if gdal was installed via conda. If you want to update environment from the yml file, you can either drop --prune to not remove conda gdal or reinstall gdal afterwards each time.
+
+## Option B:
+
+#
+conda install -c conda-forge gdal
+
+# Install touchterrain as a module in "editable" state so it links back to the local development code folder
+pip install -e .
+
+# Conda shows packages from both conda and pip and where they are from (pypi (pip) or conda-forge (conda))
+conda list
+
+# Verification
+pip list -v
+# should list a line for touchterrain like
+# touchterrain 3.7.0 C:\Users\XXX\development\TouchTerrain_for_CAGEO
+
+
+#delete env
+conda env list
+conda deactivate
+conda env remove --name touchterrain-dev
\ No newline at end of file
diff --git a/test/test_TouchTerrain_standalone.py b/test/test_TouchTerrain_standalone.py
index 3b40604b..a17bfb06 100644
--- a/test/test_TouchTerrain_standalone.py
+++ b/test/test_TouchTerrain_standalone.py
@@ -44,7 +44,7 @@ def run_get_zipped_tiles(overwrite_args, testname):
for k in sorted(args.keys()):
print("%s = %s" % (k, str(args[k])))
- totalsize, full_zip_file_name = TouchTerrain.get_zipped_tiles(**args)
+ totalsize, full_zip_file_name = TouchTerrain.get_zipped_tiles(args)
#print("In tmp, created zip file", full_zip_file_name, "%.2f" % totalsize, "Mb")
from os import getcwd, sep, remove
diff --git a/test/test_polygon_clipping.py b/test/test_polygon_clipping.py
new file mode 100644
index 00000000..cef53e56
--- /dev/null
+++ b/test/test_polygon_clipping.py
@@ -0,0 +1,253 @@
+import numpy
+import unittest
+import shapely
+
+from touchterrain.common.polygon_clipping import _apply_polygon_clip_updates, find_intersection_geometries, mark_overlapping_edges_for_walls, mark_shared_edges_for_walls
+from touchterrain.common.BorderEdge import BorderEdge
+from touchterrain.common.RasterVariants import RasterVariants
+from touchterrain.common.shapely_plot import plot_shapely_geometries_colormap
+
+def createTestOverlappingEdges() -> list[list[BorderEdge]]:
+ return [
+ [
+ BorderEdge(geometry = shapely.LineString([(0,0), (0,50)]), polygon_line=True)
+ ],
+ [
+ BorderEdge(geometry = shapely.LineString([(0,0), (0,10)]), polygon_line=False),
+ BorderEdge(geometry = shapely.LineString([(0,10), (0,20)]), polygon_line=True),
+ BorderEdge(geometry = shapely.LineString([(0,20), (0,30)]), polygon_line=False),
+ BorderEdge(geometry = shapely.LineString([(0,30), (0,50)]), polygon_line=True)
+ ]
+ ]
+
+def createTestPolygonCellIntersectionData() -> tuple[shapely.Polygon, list[list[tuple[float, float]]]]:
+
+ '''
+ |-------
+ | /
+ | \\
+ | | <- cell vertical boundary. Clipping portion here is (10,15)<>(10,20)
+ | \\
+ | /
+ |_______|
+ '''
+ clippingPrint2DPoly = shapely.Polygon([(0, 0), (10, 0), (10, 5), (15, 10), (10, 15), (10, 20), (5, 25), (10, 30), (5, 30), (0, 25), (0, 0)])
+
+ # quad are arranged in 3x2 (Y,X). Vertices in CCW order NW SW SE NE
+ quadPrint2DCoords1 = [(1, 30), (1.0, 1), (10, 1), (10, 30), (1, 30)]
+ quadPrint2DCoords2 = [(10, 30), (10.0, 1), (19, 1), (19, 30), (10, 30)]
+ quadPrint2DCoords3 = [(1, 1), (1.0, -28), (10, -28), (10, 1), (1, 1)]
+    quadPrint2DCoords4 = [(10, 1), (10.0, -28), (19, -28), (19, 1), (10, 1)]
+ # quad 5 and 6 are outside below the clipping polygon
+ quadPrint2DCoords5 = [(1, -28), (1.0, -57), (10, -57), (10, -28), (1, -28)]
+ quadPrint2DCoords6 = [(10, -28), (10.0, -57), (19, -57), (19, -28), (10, -28)]
+
+ return (clippingPrint2DPoly, [
+ quadPrint2DCoords1,
+ quadPrint2DCoords2,
+ quadPrint2DCoords3,
+ quadPrint2DCoords4,
+ quadPrint2DCoords5,
+ quadPrint2DCoords6])
+
+class TestPolygonClipping(unittest.TestCase):
+
+ def test_find_intersection_geometries(self):
+ testData = createTestPolygonCellIntersectionData()
+ clippingPrint2DPoly = testData[0]
+
+ raster_variants = RasterVariants(original=numpy.ones((3, 2)), nan_close=None, dilated=None, edge_interpolation=None)
+ raster_variants.polygon_intersection_geometry = numpy.full(raster_variants.original.shape, None, dtype=object)
+ raster_variants.polygon_intersection_edge_buckets = numpy.full(raster_variants.original.shape, None, dtype=object)
+ raster_variants.polygon_intersection_contains_properly = numpy.zeros(raster_variants.original.shape, dtype=bool)
+
+ clippingPrint2DPolyIndexMap = numpy.arange(6).reshape(raster_variants.original.shape)
+
+ disjoint_cells = []
+ geom_updates = []
+ edge_updates = []
+ contains_updates = []
+
+ for j in range(0, raster_variants.original.shape[0]): # Y
+ for i in range(0, raster_variants.original.shape[1]): # X
+ quad_coords = testData[1][clippingPrint2DPolyIndexMap[j][i]]
+ disjoint, geoms, edge_buckets, contains_properly = find_intersection_geometries(
+ clippingPrint2DPoly=clippingPrint2DPoly,
+ quadPrint2DCoords=quad_coords,
+ )
+
+ if disjoint:
+ disjoint_cells.append((j, i))
+
+ if geoms:
+ geom_updates.append((j, i, geoms))
+
+ if edge_buckets and any(len(v) > 0 for v in edge_buckets.values()):
+ edge_updates.append((j, i, edge_buckets))
+
+ if contains_properly:
+ contains_updates.append((j, i))
+
+ _apply_polygon_clip_updates(
+ surface_raster_variant=[raster_variants],
+ top_hint=None,
+ updates=(disjoint_cells, geom_updates, edge_updates, contains_updates),
+ )
+
+ self.assertTrue(~numpy.isnan(raster_variants.original[0][0]))
+ self.assertTrue(~numpy.isnan(raster_variants.original[0][1]))
+ self.assertTrue(~numpy.isnan(raster_variants.original[1][0]))
+ self.assertTrue(~numpy.isnan(raster_variants.original[1][1]))
+ self.assertTrue(numpy.isnan(raster_variants.original[2][0]))
+ self.assertTrue(numpy.isnan(raster_variants.original[2][1]))
+
+ mark_shared_edges_for_walls(polygon_intersection_edge_buckets=raster_variants.polygon_intersection_edge_buckets, elevation_raster=raster_variants.original, direction=(-1,-1))
+
+ quadPolys = list(map(lambda x:shapely.Polygon(x),testData[1]))
+ basePolys = [testData[0]] + quadPolys
+
+ intersectionPolys = []
+ edgeBucketsFlattenedPerCell = []
+ for j in range(0, raster_variants.original.shape[0]): # Y
+ for i in range(0, raster_variants.original.shape[1]): # X
+ # Debug only show results from a specific cell
+ # if (j != 0 or i != 1) and (j != 0 or i != 0):
+ # continue
+
+ # if raster_variants.polygon_intersection_geometry[j][i] is not None:
+ # intersectionPolys += [
+ # raster_variants.polygon_intersection_geometry[j][i]
+ # ]
+
+ if raster_variants.polygon_intersection_edge_buckets[j][i] is not None:
+ edgeBucketsFlattenedPerCell += [
+ raster_variants.polygon_intersection_edge_buckets[j][i]['N'] +
+ raster_variants.polygon_intersection_edge_buckets[j][i]['W'] +
+ raster_variants.polygon_intersection_edge_buckets[j][i]['S'] +
+ raster_variants.polygon_intersection_edge_buckets[j][i]['E'] +
+ raster_variants.polygon_intersection_edge_buckets[j][i]['other']
+ ]
+
+ self.assertTrue(len(raster_variants.polygon_intersection_edge_buckets[0][0]['N']) == 1)
+ self.assertTrue(raster_variants.polygon_intersection_edge_buckets[0][0]['N'][0].make_wall)
+
+ plot_shapely_geometries_colormap(basePolys=basePolys, intersectionPolys=intersectionPolys, edgeBuckets=edgeBucketsFlattenedPerCell)
+
+ def test_mark_overlapping_edges_for_walls(self):
+ testEdges = createTestOverlappingEdges()
+
+ cell_A_edges = testEdges[0]
+ cell_B_edges = testEdges[1]
+
+ mark_overlapping_edges_for_walls(cell_1_edges=cell_A_edges, cell_2_edges=cell_B_edges)
+
+ self.assertTrue(len(cell_A_edges) == 4)
+
+ self.assertTrue(len(cell_A_edges[0].geometry.coords) == 2)
+ self.assertTrue(cell_A_edges[0].geometry.coords[0][0] == 0)
+ self.assertTrue(cell_A_edges[0].geometry.coords[0][1] == 0)
+ self.assertTrue(cell_A_edges[0].geometry.coords[-1][0] == 0)
+ self.assertTrue(cell_A_edges[0].geometry.coords[-1][1] == 10)
+ self.assertTrue(cell_A_edges[0].polygon_line == True)
+ self.assertTrue(cell_A_edges[0].make_wall == True)
+
+ self.assertTrue(len(cell_A_edges[3].geometry.coords) == 2)
+ self.assertTrue(cell_A_edges[3].geometry.coords[0][0] == 0)
+ self.assertTrue(cell_A_edges[3].geometry.coords[0][1] == 30)
+ self.assertTrue(cell_A_edges[3].geometry.coords[-1][0] == 0)
+ self.assertTrue(cell_A_edges[3].geometry.coords[-1][1] == 50)
+ self.assertTrue(cell_A_edges[3].polygon_line == True)
+ self.assertTrue(cell_A_edges[3].make_wall == False)
+
+ self.assertTrue(len(cell_B_edges) == 4)
+
+ self.assertTrue(len(cell_B_edges[0].geometry.coords) == 2)
+ self.assertTrue(cell_B_edges[0].geometry.coords[0][0] == 0)
+ self.assertTrue(cell_B_edges[0].geometry.coords[0][1] == 0)
+ self.assertTrue(cell_B_edges[0].geometry.coords[-1][0] == 0)
+ self.assertTrue(cell_B_edges[0].geometry.coords[-1][1] == 10)
+ self.assertTrue(cell_B_edges[0].polygon_line == False)
+ self.assertTrue(cell_B_edges[0].make_wall == False)
+
+ self.assertTrue(cell_B_edges[1].make_wall == False)
+ self.assertTrue(cell_B_edges[2].make_wall == False)
+
+ self.assertTrue(len(cell_B_edges[3].geometry.coords) == 2)
+ self.assertTrue(cell_B_edges[3].geometry.coords[0][0] == 0)
+ self.assertTrue(cell_B_edges[3].geometry.coords[0][1] == 30)
+ self.assertTrue(cell_B_edges[3].geometry.coords[-1][0] == 0)
+ self.assertTrue(cell_B_edges[3].geometry.coords[-1][1] == 50)
+ self.assertTrue(cell_B_edges[3].polygon_line == True)
+ self.assertTrue(cell_B_edges[3].make_wall == False)
+
+ # Check if all edges are matched (shown by marking for skip)
+ for edge in cell_A_edges:
+ self.assertTrue(edge.skip_future_eval_for_walls == True)
+
+ for edge in cell_B_edges:
+ self.assertTrue(edge.skip_future_eval_for_walls == True)
+
+
+ def test_mark_overlapping_edges_for_walls_flipped(self):
+ testEdges = createTestOverlappingEdges()
+
+ cell_B_edges = testEdges[1]
+ cell_A_edges = testEdges[0]
+
+ mark_overlapping_edges_for_walls(cell_1_edges=cell_B_edges, cell_2_edges=cell_A_edges)
+
+ self.assertTrue(len(cell_A_edges) == 4)
+
+ self.assertTrue(len(cell_A_edges[0].geometry.coords) == 2)
+ self.assertTrue(cell_A_edges[0].geometry.coords[0][0] == 0)
+ self.assertTrue(cell_A_edges[0].geometry.coords[0][1] == 0)
+ self.assertTrue(cell_A_edges[0].geometry.coords[-1][0] == 0)
+ self.assertTrue(cell_A_edges[0].geometry.coords[-1][1] == 10)
+ self.assertTrue(cell_A_edges[0].polygon_line == True)
+ self.assertTrue(cell_A_edges[0].make_wall == True) # don't make wall on cell 2(A) if flipped
+
+ self.assertTrue(len(cell_A_edges[3].geometry.coords) == 2)
+ self.assertTrue(cell_A_edges[3].geometry.coords[0][0] == 0)
+ self.assertTrue(cell_A_edges[3].geometry.coords[0][1] == 30)
+ self.assertTrue(cell_A_edges[3].geometry.coords[-1][0] == 0)
+ self.assertTrue(cell_A_edges[3].geometry.coords[-1][1] == 50)
+ self.assertTrue(cell_A_edges[3].polygon_line == True)
+ self.assertTrue(cell_A_edges[3].make_wall == False)
+
+ self.assertTrue(len(cell_B_edges) == 4)
+
+ self.assertTrue(len(cell_B_edges[0].geometry.coords) == 2)
+ self.assertTrue(cell_B_edges[0].geometry.coords[0][0] == 0)
+ self.assertTrue(cell_B_edges[0].geometry.coords[0][1] == 0)
+ self.assertTrue(cell_B_edges[0].geometry.coords[-1][0] == 0)
+ self.assertTrue(cell_B_edges[0].geometry.coords[-1][1] == 10)
+ self.assertTrue(cell_B_edges[0].polygon_line == False)
+ self.assertTrue(cell_B_edges[0].make_wall == False)
+
+ self.assertTrue(cell_B_edges[1].make_wall == False)
+ self.assertTrue(cell_B_edges[2].make_wall == False)
+
+ self.assertTrue(len(cell_B_edges[3].geometry.coords) == 2)
+ self.assertTrue(cell_B_edges[3].geometry.coords[0][0] == 0)
+ self.assertTrue(cell_B_edges[3].geometry.coords[0][1] == 30)
+ self.assertTrue(cell_B_edges[3].geometry.coords[-1][0] == 0)
+ self.assertTrue(cell_B_edges[3].geometry.coords[-1][1] == 50)
+ self.assertTrue(cell_B_edges[3].polygon_line == True)
+ self.assertTrue(cell_B_edges[3].make_wall == False)
+
+ # Check if all edges are matched (shown by marking for skip)
+ for edge in cell_A_edges:
+ self.assertTrue(edge.skip_future_eval_for_walls == True)
+
+ for edge in cell_B_edges:
+ self.assertTrue(edge.skip_future_eval_for_walls == True)
+
+ # cell_2_edges: list[BorderEdge] = [
+ # BorderEdge(geometry = LineString([(0,0), (0,5)]), polygon_line=False),
+ # BorderEdge(geometry = LineString([(0,5), (5,10)]), polygon_line=False),
+ # BorderEdge(geometry = LineString([(5,10), (0,15)]), polygon_line=False),
+ # BorderEdge(geometry = LineString([(0,15), (0,20)]), polygon_line=False),
+ # BorderEdge(geometry = LineString([(0,20), (7,25)]), polygon_line=False),
+ # BorderEdge(geometry = LineString([(7,25), (0,30)]), polygon_line=False),
+ # BorderEdge(geometry = LineString([(0,30), (0,50)]), polygon_line=False),
+ # ]
diff --git a/touchterrain/common/BorderEdge.py b/touchterrain/common/BorderEdge.py
new file mode 100644
index 00000000..ab16edf3
--- /dev/null
+++ b/touchterrain/common/BorderEdge.py
@@ -0,0 +1,21 @@
+import shapely
+
+class BorderEdge:
+ """Represents an edge on a top/bottom surface and whether it should have a vertical wall generated. Geometry is stored as a shapely.LineString.
+ """
+
+ geometry: shapely.LineString
+
+ polygon_line: bool = False
+ "Is the edge part of a polygon?"
+
+ skip_future_eval_for_walls: bool = False
+ "Should this edge be skipped in the next looped evaluation of edges for walls. This should be true if we have already found a matching edge on the neighboring cell."
+ make_wall: bool = False
+ "Should a vertical wall be generated during create_cell() during create_cell()"
+
+ def __init__(self, geometry: shapely.LineString, polygon_line: bool = False, skip_future_eval_for_walls: bool = False, make_wall: bool = False):
+ self.geometry = geometry
+ self.polygon_line = polygon_line
+ self.skip_future_eval_for_walls = skip_future_eval_for_walls
+ self.make_wall = make_wall
\ No newline at end of file
diff --git a/touchterrain/common/Quad.py b/touchterrain/common/Quad.py
new file mode 100644
index 00000000..77f3e0ee
--- /dev/null
+++ b/touchterrain/common/Quad.py
@@ -0,0 +1,198 @@
+import shapely
+
+from touchterrain.common.Vertex import vertex
+
+class quad:
+ """return list of 2 triangles (counterclockwise) per quad
+ wall quads will NOT subdivide their quad into subquads if they are too skinny
+    as this would require re-indexing the entire mesh. However, I left the subdivide
+ stuff in in case we want to re-visit it later.
+ """
+ # class attribute, use quad.too_skinny_ratio
+ too_skinny_ratio = 0.1 # border quads with a horizontal vs vertical ratio smaller than this will be subdivided
+
+ vl: list[vertex] = []
+ """Vertices mapping NW SW SE NE
+ - Top 0 1 2 3
+ - Bottom 0 3 2 1
+ """
+
+    # can be just a triangle, if given just any 3 CCW consecutive corners
+ def __init__(self, v0, v1, v2, v3=None):
+ self.vl = [v0, v1, v2, v3]
+ self.subdivide_by = None # if not None, we need to subdivide the quad into that many subquads
+
+ def get_copy(self):
+ ''' returns a copy of the quad'''
+ vl = self.vl[:]
+ cp = quad(vl[0], vl[1], vl[2], vl[3])
+ return cp
+
+ def check_if_too_skinny(self, direction):
+        '''if a border quad is too skinny it will need to be subdivided into multiple quads'''
+ #print direction, [str(v) for v in self.vl]
+
+ # order of verts will be different for N,S vs E,W walls!
+ if direction in ("S", "N"): # '-49.50 49.50 0.00 ', '-49.50 49.50 10.00 ', '-50.00 49.50 10.00 ', '-50.00 49.50 0.00 '
+ horz_dist = abs(self.vl[0][0] - self.vl[2][0]) # x diff of v0 and v2
+ max_elev = max(self.vl[1][2], self.vl[2][2]) # max elevation of v1 vs v2
+ min_elev = min(self.vl[0][2], self.vl[3][2]) # min elevation v0 vs v3
+            vert_dist = max_elev - min_elev # vertical extent of the wall quad
+ else: # -49.50 50.00 10.00 ', '-49.50 49.50 10.00 ', '-49.50 49.50 0.00 ', '-49.50 50.00 0.00 '
+ horz_dist = abs(self.vl[0][1] - self.vl[1][1]) # y diff of v0 and v1
+ max_elev = max(self.vl[0][2], self.vl[1][2]) # max elevation of v0 vs v1
+ min_elev = min(self.vl[2][2], self.vl[3][2]) # min elevation v2 vs v3
+            vert_dist = max_elev - min_elev # vertical extent of the wall quad
+ if vert_dist == 0: return # walls can be 0 height
+
+ ratio = horz_dist / float (vert_dist)
+ #print ratio, quad.too_skinny_ratio, quad.too_skinny_ratio / ratio
+ if ratio < quad.too_skinny_ratio:
+ sb = int(quad.too_skinny_ratio / ratio)
+ self.subdivide_by = sb
+
+ def get_triangles(self, split_rotation: int=0) -> list[tuple[vertex,...]]:
+ "return list of 2 triangles (counterclockwise)"
+ v0,v1,v2,v3 = self.vl[0],self.vl[1],self.vl[2],self.vl[3]
+ t0 = (v0, v1, v2) # verts of first triangle
+
+ # if v3 is None, we only return t0
+ if v3 is None:
+ return [t0]
+
+ t1 = (v0, v2, v3) # verts of second triangle
+
+ if split_rotation != 1 and split_rotation != 2:
+ return [t0,t1]
+
+ splitting_edge_slope_1 = abs(v0.coords[2] - v2.coords[2])
+ splitting_edge_slope_2 = abs(v1.coords[2] - v3.coords[2])
+ if split_rotation == 1:
+ if splitting_edge_slope_1 > splitting_edge_slope_2:
+ t0 = (v0, v1, v3)
+ t1 = (v1, v2, v3)
+ elif split_rotation == 2:
+ if splitting_edge_slope_1 < splitting_edge_slope_2:
+ t0 = (v0, v1, v3)
+ t1 = (v1, v2, v3)
+ else:
+ print(f"Invalid split_rotation config value of {split_rotation}")
+
+ return [t0,t1]
+
+ def get_triangles_in_tuple_float(self, split_rotation: int) -> list[tuple[tuple[float, ...], ...]]:
+ # convert Vertex objects in the tri to tuple[float, ...]
+ quad_tris_in_tuple_float: list[tuple[tuple[float, ...], ...]] = []
+
+ quad_tris = self.get_triangles(split_rotation=split_rotation)
+ for tri in quad_tris:
+ if tri is not None:
+ quad_tris_in_tuple_float.append((tri[0].coords, tri[1].coords, tri[2].coords))
+
+ return quad_tris_in_tuple_float
+
+ def get_triangles_in_polygons(self, split_rotation: int) -> list[shapely.Polygon]:
+ # convert Vertex objects in the tri to list[shapely.Polygon]
+ quad_tris_in_polygons: list[shapely.Polygon] = []
+
+ quad_tris = self.get_triangles(split_rotation=split_rotation)
+ for tri in quad_tris:
+ if tri is not None:
+ quad_tris_in_polygons.append(shapely.Polygon([tri[0].coords, tri[1].coords, tri[2].coords, tri[0].coords]))
+
+ return quad_tris_in_polygons
+
+ '''
+ # splits skinny triangles
+ def get_triangles(self, direction=None):
+ """return list of 2 triangles (counterclockwise) per quad
+ wall quads will subdivide their quad into subquads if they are too skinny
+ """
+ v0,v1,v2,v3 = self.vl[0],self.vl[1],self.vl[2],self.vl[3]
+
+ # do we need to subdivide?
+ if self.subdivide_by is None: # no, either not a wall or a chunky wall
+ t0 = (v0, v1, v2) # verts of first triangle
+ t1 = (v0, v2, v3) # verts of second triangle
+ return (t0,t1)
+
+ else:
+ # subdivde into sub quads and return their triangles
+
+ # order of verts will be different for N,S vs E,W walls!
+ if direction in ("S", "N"): # '-49.50 49.50 0.00 ', '-49.50 49.50 10.00 ', '-50.00 49.50 10.00 ', '-50.00 49.50 0.00 '
+ horz_dist = abs(self.vl[0][0] - self.vl[2][0]) # x diff of v0 and v2
+ max_elev = max(self.vl[1][2], self.vl[2][2]) # max elevation of v1 vs v2
+ min_elev = min(self.vl[0][2], self.vl[3][2]) # min elevation v0 vs v3
+ vert_dist = max_elev - min_elev # z diff of v0 and v1
+ else: # -49.50 50.00 10.00 ', '-49.50 49.50 10.00 ', '-49.50 49.50 0.00 ', '-49.50 50.00 0.00 '
+ horz_dist = abs(self.vl[0][1] - self.vl[1][1]) # y diff of v0 and v1
+ max_elev = max(self.vl[0][2], self.vl[1][2]) # max elevation of v0 vs v1
+ min_elev = min(self.vl[2][2], self.vl[3][2]) # min elevation v2 vs v3
+ vert_dist = max_elev - min_elev # z diff of v0 and v1
+
+
+
+ tri_list = []
+
+ # for finding the height of the sub quads I don't care about the different vert order
+ z_list =[v[2] for v in self.vl]
+ z_top = max(z_list) # z height of the top (take min() b/c one might be higher)
+ z_bot = min(z_list) # z height at bottom
+ z_dist = z_top - z_bot # distance to be
+
+ #self.subdivide_by = 3 # DEBUG
+
+ qheight = z_dist / float(self.subdivide_by) # height (elevation dist) of each quad
+ height_list = [ z_top - qheight * i for i in range(self.subdivide_by+1) ] # list of h
+
+ # make new subquads and return a list of their triangles
+ vl_copy = copy.deepcopy(self.vl) # must make a deep copy, otherwise changing the subquads affect the current quad
+ tl = [] # triangle list
+
+ bottom_height_list = height_list[1:]
+ for n,_ in enumerate(bottom_height_list):
+ v0_,v1_,v2_,v3_ = vl_copy[0], vl_copy[1], vl_copy[2],vl_copy[3] # unroll copy
+ #print n,v0_,v1_,v2_,v3_
+
+ # as order of verts will be different for N,S vs E,W walls we need 2 different cases
+ if direction in ("N", "S"):
+ top_inds = (1,2)
+ bot_inds = (0,3)
+ else:
+ top_inds = (0,1)
+ bot_inds = (2,3)
+
+
+ # top verts
+ if n > 0: # don't change top z for topmost sub quad
+ h = height_list[n]
+ v= vl_copy[top_inds[0]] # first vertex of subquad
+ v.coords[2] = h # set its z value
+ v= vl_copy[top_inds[1]]
+ v.coords[2] = h
+
+ # bottom verts
+ if n < len(bottom_height_list): # don't change bottom z for bottommost sub quad
+ h = height_list[n+1]
+ v = vl_copy[bot_inds[0]]
+ v.coords[2] = h
+ v = vl_copy[bot_inds[1]]
+ v.coords[2] = h
+
+ # make a sub quad
+ sq = copy.deepcopy(quad(vl_copy[0], vl_copy[1], vl_copy[2],vl_copy[3])) # each subquad needs to be its own copy
+ #print n, sq,
+
+ t0,t1 = sq.get_triangles()
+ tl.append(t0)
+ tl.append(t1)
+
+ return tl
+ '''
+
+ def __str__(self):
+ rs =" "
+ for n,v in enumerate(self.vl):
+ rs = rs + "v" + str(n) + ": " + str(v) + " "
+ return rs
\ No newline at end of file
diff --git a/touchterrain/common/RasterVariants.py b/touchterrain/common/RasterVariants.py
new file mode 100644
index 00000000..c1ea990c
--- /dev/null
+++ b/touchterrain/common/RasterVariants.py
@@ -0,0 +1,181 @@
+from typing import Union, Any, Callable
+import numpy as np
+
+class RasterVariants:
+ """Holds a raster with processed copies of it
+ """
+
+ original: Union[None, np.ndarray] # Original full raster
+ """
+ Original raster.
+
+ ## Normal mode:
+
+ Top: The original.
+
+ ## Difference mode:
+
+ Top: original
+
+ Bottom: Original, but ALL areas matched to top_hint mask are set to bottom_floor_elev.
+ """
+
+ nan_close: Union[None, np.ndarray] # Raster after NaN close values to bottom and before dilation
+ """
+ Raster after nan close values between top and bottom.
+
+ ## Normal mode:
+
+ Top: Same as original.
+
+ ## Difference mode:
+
+ Top: NaN close values
+
+ Bottom: Original + top_hint mask + NaN close values.
+ """
+ dilated: Union[None, np.ndarray]
+ """
+ Raster after dilation.
+
+ ## Normal mode:
+
+ Top: Same as original.
+ If top_hint provided, original but dilated outwards towards the top_hint mask with bottom_floor_elev value.
+
+
+ ## Difference mode:
+
+ Top: Dilated outwards from the nan_close variant outwards 2x with top.original values
+
+ Bottom: Original + top_hint mask + NaN close values + Dilated outwards 2x with top.original values
+ """
+
+ edge_interpolation: Union[None, np.ndarray] # Original full raster with values past edges for interpolation
+
+
+ polygon_intersection_geometry: Union[None, np.ndarray] #ndarray dtype=object so we can set it with a list[shapely.Geometry]
+ """
+ Intersection geometry between the cell quad and the clipping geometry. In print3DCoordinates. Represented as np.ndarray[list[shapely.Geometry]] The list can include LineString/Polygon. The Polygon geometries are used for making top/bottom surface for a cell.
+
+ This is not a variant!
+ - The precomputed intersecting geometries for a single cell Y,X location that applies across all variants. The cell may not be initialized yet.
+ - This is not padded.
+
+ Raster values set to NaN and no polygon_intersection_geometry set if the cell quad is disjoint from the clipping polygon.
+
+ Raster value kept as imported and polygon_intersection_geometry if there is any intersection between cell and clipping polygon. We determine wall marking by comparing edge buckets + dilated elevation raster + L/PL (line / polygon line) between shared edges.
+
+    Before create_cell() is called, we set the polygon_intersection_geometry to None for all cells that are contained properly in the clipping polygon so that create_cell() can rely solely on a RasterVariants for info and know to use the quad for enclosed cells so flipping may be applied.
+ """
+
+ polygon_intersection_edge_buckets: Union[None, np.ndarray] #ndarray dtype=object so we can set it with a dict[str,list[BorderEdge]]
+ """
+ Clipping intersection lines that overlap the normal quad edges in the 4 cardinal directions. Dict keys of 'N' 'W' 'S' 'E' 'other'. Represented as np.ndarray[dict[str,list[BorderEdge]]]. The BorderEdges along the side of a cell are used when creating borders (wall) for a cell.
+
+ This is not a variant!
+
+ polygon_intersection_edge_buckets existence is same as polygon_intersection_geometry to indicate disjoint or contained properly.
+
+ TODO: This should be stored in the cell object but we only keep the cell objects as we iterate through them so RasterVariants is the place to store this to maintain state.
+ """
+
+ polygon_intersection_contains_properly: Union[None, np.ndarray] #ndarray dtype=object so we can set it with a bool
+ """
+ Store whether a cell is contains_properly within the clipping polygon
+
+ This is not a variant!
+ """
+
+ def __init__(self, original: Union[None, np.ndarray], nan_close: Union[None, np.ndarray], dilated: Union[None, np.ndarray], edge_interpolation: Union[None, np.ndarray]):
+ self.original = original
+ self.nan_close = nan_close
+ self.dilated = dilated
+ self.edge_interpolation = edge_interpolation
+
+ self.polygon_intersection_geometry = None
+ self.polygon_intersection_edge_buckets = None
+ self.polygon_intersection_contains_properly = None
+
+ def copy_tile_raster_variants(self, start_y, end_y, start_x, end_x):
+ """Create a RasterVariants based on a subset of the current RasterVariants. Arrays are copied.
+ """
+ tile_raster = RasterVariants(None, None, None, None)
+
+ if self.original is not None:
+ tile_raster.original = self.original[start_y:end_y, start_x:end_x].copy()
+ if self.nan_close is not None:
+ tile_raster.nan_close = self.nan_close[start_y:end_y, start_x:end_x].copy()
+ if self.dilated is not None:
+ tile_raster.dilated = self.dilated[start_y:end_y, start_x:end_x].copy()
+ if self.edge_interpolation is not None:
+ tile_raster.edge_interpolation = self.edge_interpolation[start_y:end_y, start_x:end_x].copy()
+
+ if self.polygon_intersection_geometry is not None:
+ tile_raster.polygon_intersection_geometry = self.polygon_intersection_geometry[start_y:end_y, start_x:end_x].copy()
+ if self.polygon_intersection_edge_buckets is not None:
+ tile_raster.polygon_intersection_edge_buckets = self.polygon_intersection_edge_buckets[start_y:end_y, start_x:end_x].copy()
+ if self.polygon_intersection_contains_properly is not None:
+ tile_raster.polygon_intersection_contains_properly = self.polygon_intersection_contains_properly[start_y:end_y, start_x:end_x].copy()
+
+ return tile_raster
+
+ def apply_closure_to_variants(self, f: Callable[[np.ndarray], np.ndarray]):
+ """Run a function on all variants. The function takes a ndarray as input and return a ndarray.
+ """
+ if self.original is not None:
+ self.original = f(self.original)
+ if self.nan_close is not None:
+ self.nan_close = f(self.nan_close)
+ if self.dilated is not None:
+ self.dilated = f(self.dilated)
+ if self.edge_interpolation is not None:
+ self.edge_interpolation = f(self.edge_interpolation)
+
+ def set_location_in_variants(self, location: tuple[int, int], new_value:float, set_edge_interpolation: bool = True):
+ """Set a location to new value on all variants. The function takes a tuple in Y,X order as location and a new value to set.
+ """
+ if self.original is not None:
+ self.original[location[0]][location[1]] = new_value
+ if self.nan_close is not None:
+ self.nan_close[location[0]][location[1]] = new_value
+ if self.dilated is not None:
+ self.dilated[location[0]][location[1]] = new_value
+ if set_edge_interpolation and self.edge_interpolation is not None:
+ self.edge_interpolation[location[0]][location[1]] = new_value
+
+ def __add__(self, other):
+ if self.original is not None:
+ self.original += other
+ if self.nan_close is not None:
+ self.nan_close += other
+ if self.dilated is not None:
+ self.dilated += other
+ if self.edge_interpolation is not None:
+ self.edge_interpolation += other
+
+ return self
+
+ def __sub__(self, other):
+ if self.original is not None:
+ self.original -= other
+ if self.nan_close is not None:
+ self.nan_close -= other
+ if self.dilated is not None:
+ self.dilated -= other
+ if self.edge_interpolation is not None:
+ self.edge_interpolation -= other
+
+ return self
+
+ def __mul__ (self, other):
+ if self.original is not None:
+ self.original *= other
+ if self.nan_close is not None:
+ self.nan_close *= other
+ if self.dilated is not None:
+ self.dilated *= other
+ if self.edge_interpolation is not None:
+ self.edge_interpolation *= other
+
+ return self
\ No newline at end of file
diff --git a/touchterrain/common/TouchTerrainEarthEngine.py b/touchterrain/common/TouchTerrainEarthEngine.py
index ef46b3fc..22c47491 100644
--- a/touchterrain/common/TouchTerrainEarthEngine.py
+++ b/touchterrain/common/TouchTerrainEarthEngine.py
@@ -19,17 +19,22 @@
along with this program. If not, see .
'''
-import sys
-import os
+import copy
+import io
import datetime
-from io import StringIO
-import urllib.request, urllib.error, urllib.parse
+import http.client
+import os
import socket
-import io
+import sys
+import urllib.request, urllib.error, urllib.parse
+
+from typing import Union, Any, cast
from zipfile import ZipFile
-import http.client
+
import numpy
+
from touchterrain.common.config import EE_ACCOUNT,EE_CREDS,EE_PROJECT
+from touchterrain.common.utils import *
DEV_MODE = False
#DEV_MODE = True # will use modules in local touchterrain folder instead of installed ones
@@ -39,9 +44,21 @@
sys.path = ["."] + sys.path # force imports form local touchterain folder
import touchterrain.common
-from touchterrain.common.grid_tesselate import grid # my own grid class, creates a mesh from DEM raster
+from touchterrain.common.grid_tesselate import grid, RasterVariants, ProcessingTile # my own grid class, creates a mesh from DEM raster
+from touchterrain.common.user_config import TouchTerrainConfig
+from touchterrain.common.tile_info import TouchTerrainTileInfo
from touchterrain.common.Coordinate_system_conv import * # arc to meters conversion
from touchterrain.common.utils import save_tile_as_image, clean_up_diags, fillHoles, add_to_stl_list, k3d_render_to_html, dilate_array, plot_DEM_histogram
+
+from touchterrain.common.RasterVariants import RasterVariants
+
+from touchterrain.common.polygon_clipping import find_polygon_clipping_edges, mark_shared_edges_for_walls
+
+import geopandas
+import shapely
+from touchterrain.common.shapely_utils import flatten_geometries, flatten_geometries_borderEdge, sort_line_segment_based_contains
+from touchterrain.common.shapely_plot import plot_shapely_geometries_colormap
+
if DEV_MODE:
sys.path = oldsp # back to old sys.path
@@ -73,26 +90,39 @@
# get root logger, will later be redirected into a logfile
import logging
logger = logging.getLogger()
-logger.setLevel(logging.INFO)
-
-
+logger.setLevel(logging.DEBUG)
+# AL DEC 24: refactored the EE init into a separate func to remove the warning in standalone mode. I don't use EE, so I can't test if this works for EE
# CH test Aug 18: do EE init here only
# this seems to prevent the file_cache is unavailable when using oauth2client >= 4.0.0 or google-auth
# crap from happening. It assumes that any "main" file imports TouchTerrainEarthEngine anyway.
# But, as this could also be run in a standalone scenario where EE should not be involved,
# the failed to EE init messages are just warnings
-try:
- import ee
- # uses .config/earthengine/credentials, since Nov. 2024 this must be a service account json file not a p12 file!
- # Set the path to your JSON key file
- # Authenticate using the service account
- credentials = ee.ServiceAccountCredentials(EE_ACCOUNT, EE_CREDS)
- ee.Initialize(credentials, project=EE_PROJECT)
-except Exception as e:
- logging.warning(f"EE init() error (with {EE_CREDS}) {e} (This is OK if you don't use earthengine anyway!)")
-else:
- logging.info(f"EE init() worked with {EE_CREDS}")
+_ee_initialized = False
+
+def initialize_earth_engine() -> Any | None:
+ """Initialize Google Earth Engine only when it's needed."""
+ global _ee_initialized, ee
+ try:
+ import ee
+ except Exception as e:
+ logger.warning(f"EE init() error (with {EE_CREDS}) {e} (This is OK if you don't use earthengine anyway!)")
+ return None
+
+ if _ee_initialized:
+ return ee
+
+ try:
+ # uses .config/earthengine/credentials, since Nov. 2024 this must be a service account json file not a p12 file!
+ credentials = ee.ServiceAccountCredentials(EE_ACCOUNT, EE_CREDS)
+ ee.Initialize(credentials, project=EE_PROJECT)
+ except Exception as e:
+ logger.warning(f"EE init() error (with {EE_CREDS}) {e} (This is OK if you don't use earthengine anyway!)")
+ return None
+
+ _ee_initialized = True
+ logger.info(f"EE init() worked with {EE_CREDS}")
+ return ee
# utility to print to stdout and to logger.info
def pr(*arglist):
@@ -119,50 +149,6 @@ def pr(*arglist):
"MERIT/DEM/v1_0_3"
]
-
-# Define default parameters
-# Print settings that can be used to initialize the actual args
-initial_args = {
- "DEM_name": 'USGS/3DEP/10m',# DEM_name: name of DEM source used in Google Earth Engine
- "bllat": 39.32205105794382, # bottom left corner lat
- "bllon": -120.37497608519418, # bottom left corner long
- "trlat": 39.45763749030933, # top right corner lat
- "trlon": -120.2002248034559, # top right corner long
- "importedDEM": None, # if not None, the raster file to use as DEM instead of using GEE (null in JSON)
- "printres": 0.4, # resolution (horizontal) of 3D printer (= size of one pixel) in mm
- "ntilesx": 1, # number of tiles in x and y
- "ntilesy": 1,
- "tilewidth": 120, # width of each tile in mm (<- !!!!!), tile height is calculated
- "basethick": 0.6, # thickness (in mm) of printed base
- "zscale": 2.0, # elevation (vertical) scaling
- "fileformat": "STLb", # format of 3D model files: "obj" wavefront obj (ascii),"STLa" ascii STL or "STLb" binary STL
- "tile_centered": False, # True-> all tiles are centered around 0/0, False, all tiles "fit together"
- "zip_file_name": "terrain", # base name of zipfile, .zip will be added
- #"CPU_cores_to_use" : 0, # 0 means all cores, None (null in JSON!) => don't use multiprocessing
- "CPU_cores_to_use" : None, # Special case for setting to SP that cannot be overwritten later
- "max_cells_for_memory_only" : 3000 * 3000, # if raster has more cells, use temp_files instead of memory (slower, but can be huge)
-
- # these are the args that could be given "manually" via the web UI
- "no_bottom": False, # omit bottom triangles?
- #"rot_degs": 0, # rotate by degrees ccw # CH disabled for now
- "bottom_image": None, # 1 band 8-bit greyscale image used for bottom relief
- "ignore_leq": None, # set values <= this to NaN, so they are ignored
- "lower_leq": None, # e.g. [0.0, 2.0] values <= 0.0 will be lowered by 2mm in the final model
- "unprojected": False, # project to UTM? only useful when using GEE for DEM rasters
- "only": None,# list of tile index [x,y] with is the only tile to be processed. None means process all tiles (index is 1 based)
- "importedGPX": [], # list of gpx path file(s) to be use
- "smooth_borders": True, # smooth borders by removing a border triangle?
- "offset_masks_lower": None, # [[filename, offset], [filename2, offset2], ...] offset masks to apply to map
- "fill_holes": None, # [rounds, threshold] hole filling filter iterations and threshold to fill a hole
- "poly_file": None, # local kml file for mask
- "min_elev": None, # min elev to use, None means set by min of all tiles
- "tilewidth_scale": None, # set x/y scale, with None, scale is set automatically by the selected area (region)
- "clean_diags":False, # clean of corner diagonal 1 x 1 islands?
- "bottom_elevation":None,
- "dirty_triangles:":False, # allow degenerate triangles for water
-}
-
-
def make_bottom_raster_from_image(image_file_name, shape):
"""Make a bottom image (numpy array) to be used in the stl model
@@ -215,23 +201,41 @@ def make_bottom_raster_from_image(image_file_name, shape):
# but if it's None, a buffer is made instead.
# the tile info dict (with the file/buffer size) and the buffer (or the file's name) are returns as a tuple
#@profile
-def process_tile(tile_tuple):
- tile_info = tile_tuple[0] # has info for this individual tile
- tile_elev_raster = tile_tuple[1] # the actual (top) raster
- tile_bottom_raster = tile_tuple[2] # the actual (bottom) raster (or None)
- tile_elev_orig_raster = tile_tuple[3] # the original (top) raster (or None)
-
- logger.debug("processing tile:", tile_info['tile_no_x'], tile_info['tile_no_y'])
+def process_tile(processingTile: ProcessingTile):
+ tile_info = processingTile.tile_info #tile_tuple[0] # has info for this individual tile
+ tile_elev_raster: numpy.ndarray
+ # tile_elev_nan_close_pre_dil_raster: numpy.ndarray
+ # tile_elev_orig_full_raster: numpy.ndarray
+ if isinstance(processingTile.top_raster_variants.dilated, numpy.ndarray) == False:
+ pr("top RasterVariants had no dilated raster")
+ return False
+
+ if isinstance(processingTile.top_raster_variants.dilated, numpy.ndarray):
+ tile_elev_raster = processingTile.top_raster_variants.dilated #tile_tuple[1] # the actual (top) raster
+ # if isinstance(processingTile.top_raster_variants.nan_close, numpy.ndarray):
+ # tile_elev_nan_close_pre_dil_raster = processingTile.top_raster_variants.nan_close #tile_tuple[3] # the pre dilation (top) raster (or None)
+ # if isinstance(processingTile.top_raster_variants.original, numpy.ndarray):
+ # tile_elev_orig_full_raster = processingTile.top_raster_variants.original # the original full (top) raster (or None)
+
+ tile_bottom_raster: Union[None, numpy.ndarray] = None
+ # tile_bottom_orig_full_raster: Union[None, numpy.ndarray] = None
+ if isinstance(processingTile.bottom_raster_variants, RasterVariants):
+ if isinstance(processingTile.bottom_raster_variants.dilated, numpy.ndarray):
+ tile_bottom_raster = processingTile.bottom_raster_variants.dilated #tile_tuple[2] # the actual (bottom) raster (or None)
+ # if isinstance(processingTile.bottom_raster_variants.original, numpy.ndarray):
+ # tile_bottom_orig_full_raster = processingTile.bottom_raster_variants.original
+
+ logger.debug("processing tile:", tile_info.tile_no_x, tile_info.tile_no_y)
#print numpy.round(tile_elev_raster,1)
# create a bottom relief raster (values 0.0 - 1.0)
- if tile_info["bottom_image"] != None and tile_info["no_bottom"] != None:
- logger.debug("using " + tile_info["bottom_image"] + " as relief on bottom")
- bottom_raster = make_bottom_raster_from_image(tile_info["bottom_image"], tile_elev_raster.shape)
+ if tile_info.config.bottom_image != None and tile_info.config.no_bottom != None:
+ logger.debug("using " + tile_info.config.bottom_image + " as relief on bottom")
+ bottom_raster = make_bottom_raster_from_image(tile_info.config.bottom_image, tile_elev_raster.shape)
#print "min/max:", numpy.nanmin(bottom_raster), numpy.nanmax(bottom_raster)
- bottom_raster *= (tile_info["base_thickness_mm"] * 0.8) # max relief is 80% of base thickness to still have a bit of "roof"
+ bottom_raster *= (tile_info.config.basethick * 0.8) # max relief is 80% of base thickness to still have a bit of "roof"
print("bottom image (in meters!) min/max:", numpy.nanmin(bottom_raster), numpy.nanmax(bottom_raster)) # range of bottom raster
- elif tile_bottom_raster is not None:
+ elif tile_bottom_raster is not None: # bottom elevation subtraction raster
bottom_raster = tile_bottom_raster # bottom elevation(!) raster
else:
bottom_raster = None # None means bottom is flat
@@ -393,20 +397,23 @@ def process_tile(tile_tuple):
'''
# create a grid object from the raster(s), which later converted into a triangle mesh
- g = grid(tile_elev_raster, bottom_raster, tile_elev_orig_raster, tile_info)
- del tile_elev_raster
- if bottom_raster is not None: del bottom_raster
- if tile_elev_orig_raster is not None: del tile_elev_orig_raster
+ g = grid(tile=processingTile)
+ if g == None:
+ print("grid init failed")
+ return
+ #del tile_elev_raster
+ #if bottom_raster is not None: del bottom_raster
+ #if tile_elev_nan_close_pre_dil_raster is not None: del tile_elev_nan_close_pre_dil_raster
#
# convert grid object into a triangle mesh file
#
- fileformat = tile_info["fileformat"]
+ fileformat = tile_info.config.fileformat
# info on buffer/temp file
- if tile_info.get("temp_file") != None: # contains None or a file name.
- print("Writing tile into temp. file", os.path.realpath(tile_info["temp_file"]), file=sys.stderr)
- temp_fn = tile_info.get("temp_file")
+ if tile_info.temp_file != None: # contains None or a file name.
+ print("Writing tile into temp. file", os.path.realpath(tile_info.temp_file), file=sys.stderr)
+ temp_fn = tile_info.temp_file
else:
print("Writing tile into memory buffer", file=sys.stderr)
temp_fn = None # means: use memory
@@ -427,8 +434,8 @@ def process_tile(tile_tuple):
fsize = len(b) / float(1024*1024)
- tile_info["file_size"] = fsize
- print("tile", tile_info["tile_no_x"], tile_info["tile_no_y"], fileformat, fsize, "Mb ", file=sys.stderr) #, multiprocessing.current_process()
+ tile_info.file_size = fsize
+ print("tile", tile_info.tile_no_x, tile_info.tile_no_y, fileformat, fsize, "Mb ", file=sys.stderr) #, multiprocessing.current_process()
return tile_info, b # return info and buffer/temp_file NAME
@@ -572,185 +579,249 @@ def get_bounding_box(coords):
bllon -= width/100
return trlat, trlon, bllat, bllon
+def get_print3D_dimensions(dem: gdal.Dataset, tile_scale) -> tuple[float, float]:
+ """Calculate the printed 3D dimensions based on scale of 1:tile_scale
+ :return: Tuple of width and height in mm as a float
+ """
+ print("Using tileScale of " + str(tile_scale))
+ dem_pixel_width_x = dem.GetGeoTransform()[1] # Assume pixel units are meters
+ print3D_width_per_tile = dem_pixel_width_x * 1000 * dem.RasterXSize / tile_scale
+ dem_pixel_width_y = abs(dem.GetGeoTransform()[5])
+ print3D_height_per_tile = dem_pixel_width_y * 1000 * dem.RasterYSize / tile_scale
+ return (print3D_width_per_tile, print3D_height_per_tile)
+
+def raster_preparation(top: RasterVariants, bottom: RasterVariants, top_hint: numpy.ndarray|None = None, bottom_thru_base: bool = False, bottom_floor_elev: float|None = None) -> bool:
+ """Prepare rasters by NaN close values, dilate
+
+    Modifies bottom raster.original by setting bottom_floor_elev in bottom cells where bottom is NaN and top is not NaN!
+ """
+
+    # bottom_thru_base is a special flag for thru-bottom rasters
+
+ if top.original is None:
+ print("raster_preparation: top.original is None")
+ return False
+
+ top_npim = top.original.copy()
+ bot_npim = None
+
+
+ # Normal mode
+ if bottom.original is None:
+ # If we have no bottom, we are only generating a "top".
+ if numpy.any(numpy.isnan(top_npim)):
+ # nan_close is set to the original values because we do not NaN close values in this case
+ top.nan_close = top_npim.copy() # save original top before it gets dilated
+
+ if top_hint is not None:
+ # find locations where top is NaN and top_hint is not NaN and dilate with that mask and the bottom_floor_elev value
+ hint_nan_to_base_mask = numpy.logical_and(numpy.isnan(top.original), numpy.logical_not(numpy.isnan(top_hint)))
+
+ if numpy.any(hint_nan_to_base_mask) == True:
+ # Modify original to keep it the same as the bottom raster if difference mesh used in the future
+ top.original[hint_nan_to_base_mask] = bottom_floor_elev # set ALL locs within top_hint mask area with a constant value
+
+ top_npim = dilate_array(top_npim, dilation_source=numpy.full(top_npim.shape, bottom_floor_elev), limit_mask=hint_nan_to_base_mask) # dilate in top_hint mask area with a constant value
+ # Difference mode
+ else:
+ bot_npim = bottom.original.copy()
-def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=None, # all args are keywords, so I can use just **args in calls ...
- polygon=None,
- polyURL=None,
- poly_file=None,
- importedDEM=None,
- bottom_elevation=None,
- top_thickness=None,
- printres=1.0, ntilesx=1, ntilesy=1, tilewidth=100,
- basethick=2, zscale=1.0, fileformat="STLb",
- tile_centered=False, CPU_cores_to_use=0,
- max_cells_for_memory_only=500*500*4,
- temp_folder = "tmp",
- zip_file_name="terrain",
- no_bottom=False,
- bottom_image=None,
- ignore_leq=None,
- lower_leq=None,
- unprojected=False,
- only=None,
- original_query_string=None,
- no_normals=True,
- projection=None,
- use_geo_coords=None,
- importedGPX=None,
- gpxPathHeight=25,
- gpxPixelsBetweenPoints=10,
- gpxPathThickness=1,
- map_img_filename=None,
- smooth_borders=True,
- offset_masks_lower=None,
- fill_holes=None,
- min_elev=None,
- tilewidth_scale=None,
- clean_diags=False,
- dirty_triangles=False,
- kd3_render=False,
- **otherargs):
+ # where top is actually lower than bottom (which can happen with Anson's data), set top to bottom
+ top_npim = numpy.where(top_npim < bot_npim, bot_npim, top_npim)
+ top.original = top_npim.copy()
+
+ # find locations where bottom is NaN and top is not NaN and dilate with that mask and the bottom_floor_elev value
+ # Set bottom original locations to bottom_floor_elev (push to base) where original bottom is NaN and original top is not NaN. In Anson's maps: this is for the case where the bottom is originally NaN (bottom is < 0 and under water = NaN bottom) or out at sea (NaN bottom)
+ hint_nan_to_base_mask = numpy.logical_and(numpy.isnan(bottom.original), numpy.logical_not(numpy.isnan(top.original)))
+ if numpy.any(hint_nan_to_base_mask) == True:
+            # Patch the original bottom raster with the new elevations (it is not actually "original" anymore, but we don't reuse the actual original, so it is okay)
+ bot_npim[hint_nan_to_base_mask] = bottom_floor_elev # set ALL locs within top_hint mask area with a constant value
+ bottom.original = bot_npim.copy()
+
+ # bool array with True where bottom has NaN values but top does not
+ # this is specific to Anson's way of encoding through-water cells
+ # nan_values = np.logical_and(np.isnan(bot_npim), np.logical_not(np.isnan(npim)))
+ # if np.any(nan_values) == True:
+ # bot_npim[nan_values] = 0 # set bottom NaN values to 0
+ # throughwater = True # flag for easy checking
+
+ # if both have the same value (or very close to) set both to Nan
+ # No relative tolerance here as we don't care about this concept here. Set the abs. tolerance to 0.001 m (1 mm)
+ close_values = numpy.isclose(top_npim, bot_npim, rtol=0, atol=0.001, equal_nan=False) # bool array
+
+ # for any True values in array, set corresponding top and bottom cells to NaN
+ # Also set NaN flags
+ if numpy.any(close_values) == True:
+ top_npim[close_values] = numpy.nan # set close values to NaN
+ # save original top after setting NaNs so we can skip the undilated NaN cells later
+ top.nan_close = top_npim.copy()
+
+ # if diagonal cleanup is requested, we need to do it again after setting NaNs
+ #clean_up_diags_check(top)
+
+ top_npim = dilate_array(top_npim, top.original, dilation_cycles=2) # dilate the NaN'd top with the original (pre NaN'd) top
+ top.dilated = top_npim.copy()
+
+ bot_npim[close_values] = numpy.nan # set close values to NaN
+
+ bottom.nan_close = bot_npim.copy()
+ #clean_up_diags_check(bottom) # re-check for diags
+
+ # Set bottom NaN values to a fixed height where top is not NaN
+ # This just fixes any incontinuities in the bottom mesh such as holes that were filled in the top
+ # bottom_nan_patch_locations = numpy.logical_and(numpy.isnan(bot_npim), numpy.logical_not(numpy.isnan(top.original)))
+ # if numpy.any(bottom_nan_patch_locations) == True:
+ # bot_npim[bottom_nan_patch_locations] = bottom_floor_elev
+
+ if bottom_thru_base == True:
+ #0==0
+ bot_npim = dilate_array(bot_npim) # dilate with 3x3 nanmean #
+ # Set bottom locations where difference mesh should be generated to 0. Avoid setting bottom values at top dilation ring locations because that is where the top mesh will generate a cell.
+ #bottom = top_pre_dil.copy() #use top after setting close values to NaN
+ #bottom[~numpy.isnan(bottom)] = min_elev # set non NaN locations to min_elev or 0
+ else:
+ bot_npim = dilate_array(bot_npim, top.original, dilation_cycles=2) # dilate the NaN'd bottom with the original (pre NaN'd) top (same as original bottom)
+
+
+ # if clean_diags == True:
+ # top_npim = clean_up_diags(top_npim)
+ # if bot_npim is not None:
+ # bot_npim = clean_up_diags(bot_npim)
+ # # TODO: check if this is needed as top NaNs dictate if a cell
+ # # should be skipped or not
+
+ # last step output is always in the dilated slot
+ top.dilated = top_npim.copy()
+ if bot_npim is not None:
+ bottom.dilated = bot_npim.copy()
+ return True
+
+def get_zipped_tiles(user_dict: dict[str, Any]):
"""
args:
- - DEM_name: name of DEM layer used in Google Earth Engine, see DEM_sources
- - trlat, trlon: lat/lon of top right corner of bounding box
- - bllat, bllon: lat/lon of bottom left corner of bounding box
- - polygon: optional geoJSON polygon
- - importedDEM: None (means: get the DEM from GEE) or local file name with (top) DEM to be used instead
- - bottom_elevation (None): elevation raster for the bottom of the model. Must exactly match the sizes and cell resolution of importedDEM
- - top_thickness (None): thickness of the top of the model, i.e. top - thickness = bottom. Must exactly match the sizes and cell resolution of importedDEM
- - printres: resolution (horizontal) of 3D printer (= size of one pixel) in mm
- - ntilesx, ntilesy: number of tiles in x and y
- - tilewidth: width of each tile in mm (<- !!!!!), tile height is calculated automatically
- - basethick: thickness (in mm) of printed base
- - zscale: elevation (vertical scaling)
- - fileformat: format of 3D model files: "obj" = wavefront obj (ascii)
- "STLa" = ascii STL
- "STLb" = binary STL
- "GeoTiff" = DEM raster only, no 3D geometry
- - tile_centered: True-> all tiles are centered around 0/0, False, all tiles "fit together"
- - CPU_cores_to_use: 0 means use all available cores, set to 1 to force single processor use (needed for Paste) TODO: change to True/False
- - max_cells_for_memory_only: if total number of cells is bigger, use temp_file instead using memory only
- - temp_folder: the folder to put the temp files and the final zip file into
- - zip_file_name: name of zipfile containing the tiles (st/obj) and helper files
- - no_bottom: don't create any bottom triangles. The STL file is not watertight but should still print fine with most slicers (e.g. Cura) and is much smaller
- - bottom_image: 1 band greyscale image to use as bottom relief raster, same for _each_ tile! see make_buttom_raster)
- - ignore_leq: ignore elevation values <= this value, good for removing offshore data
- - lower_leq: [threshold, offset] if elevation is lower than threhold, lower it by offset mm. Good for adding emphasis to coastlines. Unaffected by z_scale.
- - unprojected: don't apply UTM projection, can only work when exporting a Geotiff as the mesh export needs x/y in meters
- - only: 2-list with tile index starting at 1 (e.g. [1,2]), which is the only tile to be processed
- - original_query_string: the query string from the app, including map info. Put into log only. Good for making a URL that encodes the app view
- - no_normals: True -> all normals are 0,0,0, which speeds up processing. Most viewers will calculate normals themselves anyway
- - projection: EPSG number (as int) of projection to be used. Default (None) use the closest UTM zone
- - use_geo_coords: None, centered, UTM. not-None forces units to be in meters, centered will put 0/0 at model center for all tiles. Not-None will interpret basethickness to be in multiples of 10 meters (0.5 mm => 5 m)
- - importedGPX: None or List of GPX file paths that are to be plotted on the model
- - gpxPathHeight: Currently we plot the GPX path by simply adjusting the raster elevation at the specified lat/lon, therefore this is in meters. Negative numbers are ok and put a dent in the model
- - gpxPixelsBetweenPoints: GPX Files can have a lot of points. This argument controls how many pixel distance there should be between points, effectively causing fewing lines to be drawn. A higher number will create more space between lines drawn on the model and can have the effect of making the paths look a bit cleaner at the expense of less precision
- - gpxPathThickness: Stack parallel lines on either side of primary line to create thickness. A setting of 1 probably looks the best
- - polyURL: Url to a KML file (with a polygon) as a publically read-able cloud file (Google Drive)
- - poly_file: local KML file to use as mask
- - map_image_filename: image with a map of the area
- - smooth_borders: should borders be optimized (smoothed) by removing triangles?
- - min_elev: overwrites minimum elevation for all tiles
- - tilewidth_scale: divdes m width of selection box by this to get tilewidth (supersedes tilewidth setting)
- - clean_diags: if True, repair diagonal patterns which cause non-manifold edges
- - k3d_render: if True will create a html file containing the model as a k3d object.
-
+ user_dict of user specified settings
returns the total size of the zip file in Mb
"""
+ config: TouchTerrainConfig = TouchTerrainConfig()
+ config.mergeDict(dict=user_dict)
+
# Sanity checks: TODO: use better exit on error instead of throwing an assert exception
- assert fileformat in ("obj", "STLa", "STLb", "GeoTiff"), "Error: unknown 3D geometry file format:" + fileformat + ", must be obj, STLa, STLb (or GeoTiff when using local raster)"
+ assert config.fileformat in ("obj", "STLa", "STLb", "GeoTiff"), "Error: unknown 3D geometry file format:" + config.fileformat + ", must be obj, STLa, STLb (or GeoTiff when using local raster)"
- if bottom_elevation != None:
- assert importedDEM != None, "Error: importDEM local DEM raster file needed for bottom_elevation"
+ if config.bottom_elevation != None:
+ assert config.importedDEM != None, "Error: importDEM local DEM raster file needed for bottom_elevation"
- if importedDEM == None: # GEE as DEM source
- assert DEM_name in DEM_sources, "Error: DEM source must be one of: " + ", ".join(DEM_sources)
- if fileformat != "GeoTiff":
- assert unprojected == False, "Error: STL/OBJ export cannot use unprojected, only available for GeoTiff export"
+ if config.importedDEM == None: # GEE as DEM source
+ assert config.DEM_name in DEM_sources, "Error: DEM source must be one of: " + ", ".join(DEM_sources)
+ if config.fileformat != "GeoTiff":
+ assert config.unprojected == False, "Error: STL/OBJ export cannot use unprojected, only available for GeoTiff export"
else: # local raster file as DEM source
- assert os.path.exists(importedDEM), "Error: local DEM raster file " + importedDEM + " does not exist"
- assert fileformat != "GeoTiff", "Error: it's silly to make a Geotiff from a local DEM file (" + importedDEM + ") instead of a mesh file format ..."
- if bottom_elevation != None:
- assert os.path.exists(bottom_elevation), "Error: bottom elevation raster file " + bottom_elevation + " does not exist"
-
+ assert os.path.exists(config.importedDEM), "Error: local DEM raster file " + config.importedDEM + " does not exist"
+ assert config.fileformat != "GeoTiff", "Error: it's silly to make a Geotiff from a local DEM file (" + config.importedDEM + ") instead of a mesh file format ..."
+ if config.bottom_elevation is not None:
+ assert os.path.exists(config.bottom_elevation), "Error: bottom elevation raster file " + config.bottom_elevation + " does not exist"
+
+ if config.importedDEM_interp != None:
+ assert os.path.exists(config.importedDEM_interp), "Error: local DEM raster file " + config.importedDEM_interp + " does not exist"
+ assert config.fileformat != "GeoTiff", "Error: it's silly to make a Geotiff from a local DEM file (" + config.importedDEM_interp + ") instead of a mesh file format ..."
- assert not (bottom_image != None and no_bottom == True), "Error: Can't use no_bottom=True and also want a bottom_image (" + bottom_image + ")"
- assert not (bottom_image != None and basethick <= 0.5), "Error: base thickness (" + str(basethick) + ") must be > 0.5 mm when using a bottom relief image"
+ assert not (config.bottom_image != None and config.no_bottom == True), "Error: Can't use no_bottom=True and also want a bottom_image (" + config.bottom_image + ")"
+ assert not (config.bottom_image != None and config.basethick <= 0.5), "Error: base thickness (" + str(config.basethick) + ") must be > 0.5 mm when using a bottom relief image"
- assert not (bottom_elevation != None and bottom_image != None), "Error: Can't use both bottom_elevation and bottom_image"
- assert not (bottom_image != None and top_thickness != None), "Error: Can't use both bottom_image and top_thickness"
- assert not (bottom_elevation != None and no_bottom == True), "Error: Can't use no_bottom=True and also want a bottom_elevation (" + bottom_elevation + ")"
- assert not (bottom_elevation != None and top_thickness != None), "Error: Can't use both bottom_elevation and top_thickness"
+ assert not (config.bottom_elevation != None and config.bottom_image != None), "Error: Can't use both bottom_elevation and bottom_image"
+ assert not (config.bottom_image != None and config.top_thickness != None), "Error: Can't use both bottom_image and top_thickness"
+ assert not (config.bottom_elevation != None and config.no_bottom == True), "Error: Can't use no_bottom=True and also want a bottom_elevation (" + config.bottom_elevation + ")"
+ assert not (config.bottom_elevation != None and config.top_thickness != None), "Error: Can't use both bottom_elevation and top_thickness"
- assert not (bottom_elevation != None and use_geo_coords != None), "Error: use_geo_coords is currently not supported with a bottom_elevation raster"
+ assert not (config.bottom_elevation != None and config.use_geo_coords != None), "Error: use_geo_coords is currently not supported with a bottom_elevation raster"
# Check offset mask file
- if offset_masks_lower != None:
- for offset_pair in offset_masks_lower:
+ if config.offset_masks_lower != None:
+ for offset_pair in config.offset_masks_lower:
print(offset_pair[0])
assert os.path.exists(offset_pair[0]), "Error: local offset mask raster file " + offset_pair[0] + " does not exist"
- if not os.path.exists(temp_folder): # do we have a temp folder?
+ if not os.path.exists(config.temp_folder): # do we have a temp folder?
try:
- os.mkdir(temp_folder)
+ os.mkdir(config.temp_folder)
except:
- assert False, temp_folder + "doesn't exists but could also not be created"
+ assert False, config.temp_folder + "doesn't exists but could also not be created"
+
+ # Determine config filename from the full config path
+ config_filename = None
+ if config.config_path is not None:
+ config_filename = os.path.splitext(os.path.basename(config.config_path))[0]
+
+ # if zip_file_name not specified, use DEM_name or config_filename from config_path for zip_file_name if it is specified (compare with the default value)
+ if config.zip_file_name is None or len(config.zip_file_name) == 0:
+ if config.DEM_name == TouchTerrainConfig().DEM_name or config.DEM_name is None:
+ if config_filename is not None:
+ config.zip_file_name = config_filename
+ else:
+ print('No non-default DEM_name or config_path passed for determining zip_file_name')
+ return
+ else:
+ config.zip_file_name = config.DEM_name
+
# set up log file
- log_file_name = temp_folder + os.sep + zip_file_name + ".log"
+ log_file_name = config.temp_folder + os.sep + config.zip_file_name + ".log"
log_file_handler = logging.FileHandler(log_file_name, mode='w+')
formatter = logging.Formatter("%(message)s")
log_file_handler.setFormatter(formatter)
logger.addHandler(log_file_handler)
# number of tiles in EW (x,long) and NS (y,lat), must be ints
- num_tiles = [int(ntilesx), int(ntilesy)]
+ num_tiles = [int(config.ntilesx), int(config.ntilesy)]
- if only != None:
- assert only[0] > 0 and only[0] <= num_tiles[0], "Error: x index of only tile out of range"
- assert only[1] > 0 and only[1] <= num_tiles[1], "Error: y index of only tile out of range"
+ if config.only != None:
+ assert config.only[0] > 0 and config.only[0] <= num_tiles[0], "Error: x index of only tile out of range"
+ assert config.only[1] > 0 and config.only[1] <= num_tiles[1], "Error: y index of only tile out of range"
# horizontal size of "cells" on the 3D printed model (realistically: the diameter of the nozzle)
- print3D_resolution_mm = printres
+ print3D_resolution_mm = config.printres
# Nov 19, 2021: As multi processing is still broken, I'm setting CPU to 1 for now ...
#CPU_cores_to_use = 1
+ npim: Union[numpy.ndarray, None] = None
+ interp_npim: Union[numpy.ndarray, None] = None
+ top_elevation_hint_npim: Union[numpy.ndarray, None] = None
+
#
# get polygon data, either from GeoJSON (or just it's coordinates as a list) or from kml URL or file
#
clip_poly_coords = None # list of lat/lons, will create ee.Feature used for clipping the terrain image
- if polygon != None:
+ if config.polygon != None:
# If we have a GeoJSON and also a kml
- if polyURL != None and polyURL != '':
+ if config.polyURL != None and config.polyURL != '':
pr("Warning: polygon via Google Drive KML will be ignored b/c a GeoJSON polygon was also given!")
- elif poly_file != None and poly_file != '':
+ elif config.poly_file != None and config.poly_file != '':
pr("Warning: polygon via KML file will be ignored b/c a GeoJSON polygon was also given!")
# Check if we have a GeoJSON polygon (i.e. a dict) or at least a coordinate list
# ex: {"coordinates": [[[60.48766, -81.597101], [60.571116, -81.598891], ...]], "type": "Polygon"}
- if isinstance(polygon, dict):
- assert polygon["type"] == 'Polygon', f"Error: dict is not a GeoJSON polygon: {polygon}"
+ if isinstance(config.polygon, dict):
+ assert config.polygon["type"] == 'Polygon', f"Error: dict is not a GeoJSON polygon: {config.polygon}"
# Extract polygon coordinates (throw away [1] which would be a doughnut hole)
- clip_poly_coords = polygon["coordinates"][0] # ignore holes, which would be in 1,2, ...
- elif isinstance(polygon, list): # maybe it's just the coordinates? [[60.48766, -81.597101], [60.571116, -81.598891], ...]
- clip_poly_coords = polygon[0]
+ clip_poly_coords = config.polygon["coordinates"][0] # ignore holes, which would be in 1,2, ...
+ elif isinstance(config.polygon, list): # maybe it's just the coordinates? [[60.48766, -81.597101], [60.571116, -81.598891], ...]
+ clip_poly_coords = config.polygon[0]
else:
- assert False, f"Error: coordinate format must be: [[[x,y], [x,y], ...]] not {polygon}"
+ assert False, f"Error: coordinate format must be: [[[x,y], [x,y], ...]] not {config.polygon}"
logging.info("Using GeoJSON polygon for masking with " + str(len(clip_poly_coords)) + " points")
# make area selection box from bounding box of polygon
- trlat, trlon, bllat, bllon = get_bounding_box(clip_poly_coords)
+ config.trlat, config.trlon, config.bllat, config.bllon = get_bounding_box(clip_poly_coords)
# Hack: If we only have 5 points forming a rectangle just use the bounding box and forget about the polyon
# Otherwise a rectangle digitized via gee ends up as a slightly sheared rectangle
@@ -767,48 +838,48 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
#print("5 point clip polygon is", clip_poly_coords)
p = clip_poly_coords # p[0], p[1], etc., p[x][0] is lat p[x][1] is lon
if p[0][0] == p[3][0] and p[1][0] == p[2][0] and p[0][1] == p[1][1] and p[2][1] == p[3][1]:
- print("ignoring geemap box polygon, using bounding box", trlat, trlon, bllat, bllon)
+ print("ignoring geemap box polygon, using bounding box", config.trlat, config.trlon, config.bllat, config.bllon)
clip_poly_coords = None
# Get poly from a KML file via google drive URL
#TODO: TEST THIS!!!!!!
- elif polyURL != None and polyURL != '':
+ elif config.polyURL != None and config.polyURL != '':
import re, requests
pattern = r".*[^-\w]([-\w]{25,})[^-\w]?.*" # https://stackoverflow.com/questions/16840038/easiest-way-to-get-file-id-from-url-on-google-apps-script
- matches = re.search(pattern, polyURL)
+ matches = re.search(pattern, config.polyURL)
if matches and len(matches.groups()) == 1: # need to have exactly one group match
file_URL = "https://docs.google.com/uc?export=download&id=" + matches.group(1)
else:
- assert False, "Error: polyURL is invalid: " + polyURL
+ assert False, "Error: polyURL is invalid: " + config.polyURL
try:
r = requests.get(file_URL)
r.raise_for_status()
except Exception as e:
- pr("Error: GDrive kml download failed", e, " - falling back to region box", trlat, trlon, bllat, bllon)
+ pr("Error: GDrive kml download failed", e, " - falling back to region box", config.trlat, config.trlon, config.bllat, config.bllon)
else:
t = r.text
clip_poly_coords, msg = get_KML_poly_geometry(t)
if msg != None: # Either go a line instead of polygon (take but warn) or nothing (ignore)
logging.warning(msg + "(" + str(len(clip_poly_coords)) + " points)")
else:
- logging.info("Read GDrive KML polygon with " + str(len(clip_poly_coords)) + " points from " + polyURL)
+ logging.info("Read GDrive KML polygon with " + str(len(clip_poly_coords)) + " points from " + config.polyURL)
- elif poly_file != None and poly_file != '':
+ elif config.poly_file != None and config.poly_file != '':
try:
- with open(poly_file, "r") as pf:
+ with open(config.poly_file, "r") as pf:
poly_file_str = pf.read()
except Exception as e:
- pr("Read Error with kml file", poly_file, ":", e, " - falling back to region box", trlat, trlon, bllat, bllon)
+ pr("Read Error with kml file", config.poly_file, ":", e, " - falling back to region box", config.trlat, config.trlon, config.bllat, config.bllon)
else:
clip_poly_coords, msg = get_KML_poly_geometry(poly_file_str)
if msg != None: # Either got a line instead of polygon (take but warn) or nothing (ignore)
logging.warning(msg + "(" + str(len(clip_poly_coords)) + " points)")
else:
- logging.info("Read file KML polygon with " + str(len(clip_poly_coords)) + " points from " + poly_file)
+ logging.info("Read file KML polygon with " + str(len(clip_poly_coords)) + " points from " + config.poly_file)
# make area selection box from bounding box of polygon
- trlat, trlon, bllat, bllon = get_bounding_box(clip_poly_coords)
+ config.trlat, config.trlon, config.bllat, config.bllon = get_bounding_box(clip_poly_coords)
# end of polygon stuff
@@ -818,27 +889,25 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
# This is needed to avoid python unbound error since offset_npim is currently only available for local DEMs in standalone python script
offset_npim = []
+ #region A) use Earth Engine to download DEM geotiff
#
- # A) use Earth Engine to download DEM geotiff
#
- if importedDEM == None:
- try:
- import ee
- except Exception as e:
- print("Earth Engine module (ee) not installed", e, file=sys.stderr)
+ if config.importedDEM == None:
+ ee = initialize_earth_engine()
+ assert ee is not None, f"Error: Could not initialize Earth Engine with {EE_CREDS}"
- region = [[bllon, trlat],#WS NW
- [trlon, trlat],#EN NE
- [trlon, bllat],#ES SE
- [bllon, bllat]]#WS SW
+ region = [[config.bllon, config.trlat],# NW corner (west lon, north lat)
+ [config.trlon, config.trlat],# NE corner
+ [config.trlon, config.bllat],# SE corner
+ [config.bllon, config.bllat]]# SW corner
# get center of region as lon/lat, needed for conversion to meters
center = [(region[0][0] + region[1][0]) / 2, (region[0][1] + region[2][1]) / 2]
# Make a more descriptive name for the selected DEM from it's official (ee) name and the center
# if there's a / (e.g. NOAA/NGDC/ETOPO1), just get the last, ETOPO1
- DEM_title = DEM_name
- if '/' in DEM_name:
+ DEM_title = config.DEM_name
+ if '/' in config.DEM_name:
DEM_title = DEM_title.split('/')[-1]
DEM_title = "%s_%.2f_%.2f" % (DEM_title, center[0], center[1])
@@ -867,26 +936,26 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
pr("\nprocess started: " + datetime.datetime.now().time().isoformat())
- pr("\nRegion (lat/lon):\n ", trlat, trlon, "(top right)\n ", bllat, bllon, "(bottom left)")
+ pr("\nRegion (lat/lon):\n ", config.trlat, config.trlon, "(top right)\n ", config.bllat, config.bllon, "(bottom left)")
#
# Figure out which projection to use when getting DEM from GEE
#
- if unprojected == False:
- if projection != None:
- epsg = projection
+ if config.unprojected == False:
+ if config.projection != None:
+ epsg = config.projection
crs_str = f"EPSG:{epsg}"
#utm_zone_str = crs_str
pr("using " + crs_str + " as projection")
- elif bllat > 70: # too far north for UTM, use Arctic Polar Stereographic
+ elif config.bllat > 70: # too far north for UTM, use Arctic Polar Stereographic
#utm_zone_str = "WGS 84 / Arctic Polar Stereographic"
epsg = 3995
crs_str = f"EPSG:{epsg}"
pr("Too far north for UTM - using Arctic Polar Stereographic projection (EPSG 3995)")
- elif trlat < -55: # too far south for UTM, use Arctic Polar Stereographic
+ elif config.trlat < -55: # too far south for UTM, use Arctic Polar Stereographic
#tm_zone_str = "WGS 84 / Arctic Polar Stereographic"
epsg = 3031
crs_str = f"EPSG:{epsg}"
@@ -916,12 +985,12 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
region_ratio = region_size_in_meters[1] / float(region_size_in_meters[0])
# if tilewidth_scale is given, overwrite tilewidth by region width / tilewidth_scale
- if tilewidth_scale != None:
- tilewidth = region_size_in_meters[1] / tilewidth_scale * 1000 # new tilewidth in mm
- pr("Overriding tilewidth using a tilewidth_scale of 1 :", tilewidth_scale, ", region width is", region_size_in_meters[1], "m, new tilewidth is", tilewidth, "(Note that the final scale may be slighly different!)")
+ if config.tilewidth_scale != None:
+ config.tilewidth = region_size_in_meters[1] / config.tilewidth_scale * 1000 # new tilewidth in mm
+ pr("Overriding tilewidth using a tilewidth_scale of 1 :", config.tilewidth_scale, ", region width is", region_size_in_meters[1], "m, new tilewidth is", config.tilewidth, "(Note that the final scale may be slighly different!)")
# width/height (in 2D) of 3D model of ONE TILE to be printed, in mm
- print3D_width_per_tile = tilewidth # EW
+ print3D_width_per_tile = config.tilewidth # EW
print3D_height_per_tile = (print3D_width_per_tile * num_tiles[0] * region_ratio) / float(num_tiles[1]) # NS
# width/height of full 3D model (all tiles together)
@@ -954,14 +1023,14 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
#
# Get a download URL for DEM from Earth Engine
#
- if DEM_name in ("NRCan/CDEM", "AU/GA/AUSTRALIA_5M_DEM"): # Image collection?
- coll = ee.ImageCollection(DEM_name)
+ if config.DEM_name in ("NRCan/CDEM", "AU/GA/AUSTRALIA_5M_DEM"): # Image collection?
+ coll = ee.ImageCollection(config.DEM_name)
info = coll.getInfo()
elev = coll.select('elevation')
proj = elev.first().select(0).projection() # must use common projection(?)
image1 = elev.mosaic().setDefaultProjection(proj) # must mosaic collection into single image
else:
- image1 = ee.Image(DEM_name)
+ image1 = ee.Image(config.DEM_name)
info = image1.getInfo()
@@ -991,11 +1060,11 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
image1 = image1.clip(clip_feature).unmask(-32768, False)
# Make a geoJSON polygon to define the area to be printed
- reg_rect = ee.Geometry.Rectangle([[bllon, bllat], [trlon, trlat]]) # opposite corners
- if polygon == None:
+ reg_rect = ee.Geometry.Rectangle([[config.bllon, config.bllat], [config.trlon, config.trlat]]) # opposite corners
+ if config.polygon == None:
polygon_geojson = reg_rect.toGeoJSONString() # polyon is just the bounding box
else:
- polygon_geojson = polygon # actual polygon used as mask
+ polygon_geojson = config.polygon # actual polygon used as mask
# make the request dict
request_dict = {
@@ -1011,7 +1080,7 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
if cell_size_m <= 0: del request_dict["scale"]
# force to use unprojected (lat/long) instead of UTM projection, can only work for Geotiff export
- if unprojected == True: del request_dict["crs"]
+ if config.unprojected == True: del request_dict["crs"]
request = image1.getDownloadUrl(request_dict)
pr("URL for geotiff is: ", request)
@@ -1058,7 +1127,7 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
assert tifl != [], "zip from ee didn't contain a tif: " + str(nl)
# ETOPO will have bedrock and ice_surface tifs
- if DEM_name == """NOAA/NGDC/ETOPO1""":
+ if config.DEM_name == """NOAA/NGDC/ETOPO1""":
tif = [f for f in tifl if "ice_surface" in f][0] # get the DEM tif that has the ice surface
else:
tif = tifl[0] # for non ETOPO, there's just one DEM tif in that list
@@ -1071,15 +1140,15 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
str_data = zipdir.read(tif)
# write the GEE geotiff into the temp folder and add it to the zipped d/l folder later
- GEE_dem_filename = temp_folder + os.sep + zip_file_name + "_dem.tif"
+ GEE_dem_filename = config.temp_folder + os.sep + config.zip_file_name + "_dem.tif"
with open(GEE_dem_filename, "wb+") as out:
out.write(str_data)
# use GDAL to get cell size and undef value of geotiff
- dem = gdal.Open(GEE_dem_filename)
+ dem: gdal.Dataset = gdal.Open(GEE_dem_filename)
ras_x_sz = dem.RasterXSize # number of pixels in x
ras_y_sz = dem.RasterYSize
- band = dem.GetRasterBand(1)
+ band: gdal.Band = dem.GetRasterBand(1)
dem_undef_val = band.GetNoDataValue()
geo_transform = dem.GetGeoTransform()
GEE_cell_size_m = (geo_transform[1], geo_transform[5])
@@ -1091,9 +1160,11 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
pr(" geotiff size:", len(str_data) / 1048576.0, "Mb")
pr(" cell size", cell_size_m, "m, upper left corner (x/y): ", geo_transform[0], geo_transform[3])
- if fileformat == "GeoTiff": # for Geotiff output, we don't need to make a numpy array, etc, just close the GDAL dem so we can move it into the zip later
- dem = None # Python GDAL's way of closing/freeing the raster
+ if config.fileformat == "GeoTiff": # for Geotiff output, we don't need to make a numpy array, etc, just close the GDAL dem so we can move it into the zip later
del band
+ # Keep dem object around so we can use it later. We need info from it to reproject the vector files
+ # dem = None # Python GDAL's way of closing/freeing the raster
+
else: # mesh file export
assert abs(geo_transform[1]) == abs(geo_transform[5]), "Error: raster cells are not square!" # abs() b/c one can be just the negative of the other in GDAL's geotranform matrix
@@ -1108,7 +1179,7 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
# although STL can only use 32-bit floats, we need to use 64 bit floats
# for calculations, otherwise we get non-manifold vertices!
- npim = band.ReadAsArray().astype(numpy.float64)
+ npim = cast(numpy.ndarray, band.ReadAsArray()).astype(numpy.float64)
#npim = band.ReadAsArray().astype(numpy.longdouble)
#print(npim, npim.shape, npim.dtype, numpy.nanmin(npim), numpy.nanmax(npim)) #DEBUG
@@ -1124,20 +1195,20 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
# For AU/GA/AUSTRALIA_5M_DEM, replace all exact 0 value with NaN
# b/c there are spots on land that have no pixels, but these are encoded as 0 and
# need to be marked as NaN otherwise they screw up the thickness of the base
- if DEM_name == "AU/GA/AUSTRALIA_5M_DEM":
+ if config.DEM_name == "AU/GA/AUSTRALIA_5M_DEM":
npim = numpy.where(npim == 0.0, numpy.nan, npim)
# Add GPX points to the model (thanks KohlhardtC!)
- if importedGPX != None and importedGPX != []:
+ if config.importedGPX != None and config.importedGPX != []:
from touchterrain.common.TouchTerrainGPX import addGPXToModel
- addGPXToModel(pr, npim, dem, importedGPX,
- gpxPathHeight, gpxPixelsBetweenPoints, gpxPathThickness,
- trlat, trlon, bllat, bllon)
+ addGPXToModel(pr, npim, dem, config.importedGPX,
+ config.gpxPathHeight, config.gpxPixelsBetweenPoints, config.gpxPathThickness,
+ config.trlat, config.trlon, config.bllat, config.bllon)
# clip values?
- if ignore_leq != None:
- npim = numpy.where(npim <= ignore_leq, numpy.nan, npim)
- pr("ignoring elevations <= ", ignore_leq, " (were set to NaN)")
+ if config.ignore_leq != None:
+ npim = numpy.where(npim <= config.ignore_leq, numpy.nan, npim)
+ pr("ignoring elevations <= ", config.ignore_leq, " (were set to NaN)")
# Polygon masked pixels will have been set to -32768, so turn
# these into NaN. Huge values can also occur outside
@@ -1157,7 +1228,7 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
region_ratio = npim.shape[0] / float(npim.shape[1])
# width/height (in 2D) of 3D model of ONE TILE to be printed, in mm
- print3D_width_per_tile = tilewidth # EW
+ print3D_width_per_tile = config.tilewidth # EW
print3D_height_per_tile = (print3D_width_per_tile * num_tiles[0] * region_ratio) / float(num_tiles[1]) # NS
# width/height of full 3D model (all tiles together)
@@ -1170,7 +1241,7 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
adjusted_print3D_resolution = print3D_width_total_mm / float(npim.shape[1])
- if printres > 0: # did NOT use source resolution
+ if config.printres > 0: # did NOT use source resolution
pr("cell size:", cell_size_m, "m ")
pr("adjusted print res from the requested", print3D_resolution_mm, "mm to", adjusted_print3D_resolution, "mm to ensure correct model dimensions")
print3D_resolution_mm = adjusted_print3D_resolution
@@ -1181,51 +1252,66 @@ def get_zipped_tiles(DEM_name=None, trlat=None, trlon=None, bllat=None, bllon=No
pr("total model size in mm:", print3D_width_total_mm, "x", print3D_height_total_mm)
# end of getting DEM data via GEE (A)
+ #endregion
- #
- # B) DEM data comes from a local raster file (geotiff, etc.)
+ #region B) DEM data comes from a local raster file (geotiff, etc.)
#
# TODO: deal with clip polygon? Done for KML (poly_file)
-
+ # TODO: split the GEE and imported DEM code into separate functions
else:
- filename = os.path.basename(importedDEM)
-
- if bottom_elevation != None:
- btxt = "and " + bottom_elevation
- elif top_thickness != None:
- btxt = "and " + top_thickness
+ importedDEM_filename = os.path.basename(config.importedDEM)
+ importedDEM_folder = os.path.dirname(config.importedDEM)
+
+ importedDEM_interp_filename = None
+ if config.importedDEM_interp:
+ importedDEM_interp_filename = os.path.basename(config.importedDEM_interp)
+
+ if config.bottom_elevation != None:
+ btxt = "and " + config.bottom_elevation
+ elif config.top_thickness != None:
+ btxt = "and " + config.top_thickness
else:
btxt = ""
- pr("Log for creating", num_tiles[0], "x", num_tiles[1], "3D model tile(s) from", filename, btxt, "\n")
+ pr("Log for creating", num_tiles[0], "x", num_tiles[1], "3D model tile(s) from", importedDEM_filename, btxt, "\n")
pr("started:", datetime.datetime.now().time().isoformat())
# If we have a KML file, use it to mask (clip) and crop the importedDEM
- if poly_file != None and poly_file != '':
- clipped_geotiff = "clipped_" + filename
+ if config.poly_file != None and config.poly_file != '':
+ clipped_geotiff = os.path.join(importedDEM_folder, "clipped_" + importedDEM_filename)
try:
- gdal.Warp(clipped_geotiff, filename,
+ gdal.Warp(clipped_geotiff, config.importedDEM,
format='GTiff',
warpOptions=['CUTLINE_ALL_TOUCHED=TRUE'],
- cutlineDSName=poly_file,
+ cutlineDSName=config.poly_file,
cropToCutline=True,
dstNodata=-32768)
except Exception as e:
- pr("clipping", filename, "with", poly_file, "failed, using unclipped geotiff. ", e)
+ pr("clipping", config.importedDEM, "with", config.poly_file, "failed, using unclipped geotiff. ", e)
else:
- pr("clipped", filename, "with", poly_file, "now using", clipped_geotiff, "instead")
- folder = os.path.split(importedDEM)[0]
- importedDEM = os.path.join(folder, clipped_geotiff)
+ pr("clipped", importedDEM_filename, "with", config.poly_file, "now using", clipped_geotiff, "instead")
+ folder = os.path.split(config.importedDEM)[0]
+ config.importedDEM = os.path.join(folder, clipped_geotiff)
# Make numpy array from imported geotiff
- dem = gdal.Open(importedDEM)
+ dem = gdal.Open(config.importedDEM)
band = dem.GetRasterBand(1)
- npim = band.ReadAsArray().astype(numpy.float64) # top elevation values
+ npim = cast(numpy.ndarray, band.ReadAsArray()).astype(numpy.float64) # top elevation values
+
+ if importedDEM_interp_filename:
+ interp_dem = gdal.Open(config.importedDEM_interp)
+ interp_band = interp_dem.GetRasterBand(1)
+ interp_npim = cast(numpy.ndarray, interp_band.ReadAsArray()).astype(numpy.float64) # top interpolation elevation values
+
+ if config.top_elevation_hint:
+ top_elevation_hint_dem = gdal.Open(config.top_elevation_hint)
+ top_elevation_hint_band = top_elevation_hint_dem.GetRasterBand(1)
+ top_elevation_hint_npim = cast(numpy.ndarray, top_elevation_hint_band.ReadAsArray()).astype(numpy.float64)
# Read in offset mask file (Anson's stuff ...)
- if offset_masks_lower is not None:
- offset_dem = gdal.Open(offset_masks_lower[0][0])
+ if config.offset_masks_lower is not None:
+ offset_dem = gdal.Open(config.offset_masks_lower[0][0])
offset_band = offset_dem.GetRasterBand(1)
offset_npim.append(offset_band.ReadAsArray().astype(numpy.float64))
del offset_band
@@ -1270,14 +1356,19 @@ def get_GDAL_projection_and_datum(raster):
if dem_undef_val != None: # None means the raster is not a geotiff, so no undef values
undef_cells = numpy.isclose(npim, dem_undef_val) # bool with cells that are close to the GDAL undef value
npim = numpy.where(undef_cells, numpy.nan, npim) # replace GDAL undef values with nan
+ if interp_npim is not None:
+ interp_npim = numpy.where(numpy.isclose(interp_npim, dem_undef_val), numpy.nan, interp_npim) # replace GDAL undef
+
+ if top_elevation_hint_npim is not None:
+ top_elevation_hint_npim = numpy.where(numpy.isclose(top_elevation_hint_npim, dem_undef_val), numpy.nan, top_elevation_hint_npim) # replace GDAL undef
# for a bottom raster or a thickness raster, check that it matches the top raster
- if bottom_elevation != None or top_thickness != None:
- if bottom_elevation != None:
- ras = gdal.Open(bottom_elevation) # using ras here b/c it can be one of two rasters
+ if config.bottom_elevation != None or config.top_thickness != None:
+ if config.bottom_elevation != None:
+ ras = gdal.Open(config.bottom_elevation) # using ras here b/c it can be one of two rasters
else:
- ras = gdal.Open(top_thickness)
+ ras = gdal.Open(config.top_thickness)
ras_band = ras.GetRasterBand(1)
ras_npim = ras_band.ReadAsArray().astype(numpy.float64) # bottom elevation or thickness values as numpy array
ras_tf = ras.GetGeoTransform()
@@ -1305,13 +1396,13 @@ def get_GDAL_projection_and_datum(raster):
ras_npim = numpy.where(undef_cells, numpy.nan, ras_npim) # replace undef values with nan
# get bottom elevation as numpy array or create it be subtracting thickness from top elevation
- if bottom_elevation != None:
+ if config.bottom_elevation != None:
bot_npim = ras_npim # numpy array to be used later
else:
bot_npim = npim - ras_npim # bottom = top - thickness
del ras_npim # don't need it anymore
# Pretend we have a bottom elevation raster of that name so all further checks for bottom will work
- bottom_elevation = top_thickness
+ config.bottom_elevation = config.top_thickness
# close/delete the GDAL raster and band here, b/c I only need the numpy array from now on (and meta data has been stored)
ras = None # close the GDAL raster on disk
@@ -1319,66 +1410,80 @@ def get_GDAL_projection_and_datum(raster):
# Print out some info about the raster
- pr("DEM (top) raster file:", importedDEM)
- if top_thickness != None and top_thickness != '':
- pr("Top thickness raster file:", top_thickness)
- elif bottom_elevation != None:
- pr("Bottom elevation raster file:", bottom_elevation)
+ pr("DEM (top) raster file:", config.importedDEM)
+ if config.top_thickness != None and config.top_thickness != '':
+ pr("Top thickness raster file:", config.top_thickness)
+ elif config.bottom_elevation != None:
+ pr("Bottom elevation raster file:", config.bottom_elevation)
pr("DEM projection & datum:", proj_str, datum_str)
- pr("z-scale:", zscale)
- pr("min_elev:", min_elev)
- pr("basethickness:", basethick)
- pr("fileformat:", fileformat)
- pr("tile_centered:", tile_centered)
- pr("no_bottom:", no_bottom)
- pr("no_normals:", no_normals)
- pr("ignore_leq:", ignore_leq)
- pr("lower_leq:", lower_leq)
- pr("importedGPX:", importedGPX)
+ pr("z-scale:", config.zscale)
+ pr("min_elev:", config.min_elev)
+ pr("basethickness:", config.basethick)
+ pr("fileformat:", config.fileformat)
+ pr("tile_centered:", config.tile_centered)
+ pr("no_bottom:", config.no_bottom)
+ pr("no_normals:", config.no_normals)
+ pr("ignore_leq:", config.ignore_leq)
+ pr("lower_leq:", config.lower_leq)
+ pr("importedGPX:", config.importedGPX)
#pr("polyURL:", polyURL)
# Warn that anything with polygon will be ignored with a local raster (other than offset_masks!)
- if polygon != None or (polyURL != None and polyURL != ''):
+ if config.polygon != None or (config.polyURL != None and config.polyURL != ''):
pr("Warning: Given outline polygon will be ignored when using local raster file!")
# Add GPX points to the model (thanks KohlhardtC and ansonl!)
- if importedGPX != None and importedGPX != []:
+ if config.importedGPX != None and config.importedGPX != []:
from touchterrain.common.TouchTerrainGPX import addGPXToModel
- addGPXToModel(pr, npim, dem, importedGPX,
- gpxPathHeight, gpxPixelsBetweenPoints, gpxPathThickness,
- trlat, trlon, bllat, bllon)
+ addGPXToModel(pr, npim, dem, config.importedGPX,
+ config.gpxPathHeight, config.gpxPixelsBetweenPoints, config.gpxPathThickness,
+ config.trlat, config.trlon, config.bllat, config.bllon)
# clip values?
- if ignore_leq != None:
- npim = numpy.where(npim <= ignore_leq, numpy.nan, npim)
- pr("ignoring elevations <= ", ignore_leq, " (were set to NaN)")
+ if config.ignore_leq != None:
+ npim = numpy.where(npim <= config.ignore_leq, numpy.nan, npim)
+ pr("ignoring elevations <= ", config.ignore_leq, " (were set to NaN)")
+ if interp_npim is not None:
+ interp_npim = numpy.where(interp_npim <= config.ignore_leq, numpy.nan, interp_npim)
# if tilewidth_scale is given, overwrite mm tilewidth by region width / tilewidth_scale
- if tilewidth_scale != None:
- tilewidth = region_size_in_meters[1] / tilewidth_scale * 1000 # new tilewidth in mm
- pr("Overriding tilewidth using a tilewidth_scale of 1 :", tilewidth_scale, ", region width is", region_size_in_meters[1], "m, new tilewidth is", tilewidth, "mm. (Note that the final scale may be slighly different!)")
-
-
- # tile height
- whratio = npim.shape[0] / float(npim.shape[1])
- tileheight = tilewidth * whratio
- pr("tile_width:", tilewidth)
- pr("tile_height:", tileheight)
- print3D_width_per_tile = tilewidth
- print3D_height_per_tile = tileheight
+ if config.tilewidth_scale != None:
+ config.tilewidth = region_size_in_meters[1] / config.tilewidth_scale * 1000 # new tilewidth in mm
+ pr("Overriding tilewidth using a tilewidth_scale of 1 :", config.tilewidth_scale, ", region width is", region_size_in_meters[1], "m, new tilewidth is", config.tilewidth, "mm. (Note that the final scale may be slighly different!)")
+
+ #region Print width and height of tile
+ print3D_width_per_tile = -1
+ print3D_height_per_tile = -1
+
+ if config.tileScale is not None: # use config.tileScale
+ pr("tileScale:", config.tileScale)
+ print3D_width_per_tile, print3D_height_per_tile = get_print3D_dimensions(dem=dem, tile_scale=config.tileScale)
+ else: # use config.tilewidth
+ whratio = npim.shape[0] / float(npim.shape[1])
+ tileheight = config.tilewidth * whratio
+ pr("tile_width:", config.tilewidth)
+ pr("tile_height:", tileheight)
+ print3D_width_per_tile = config.tilewidth
+ print3D_height_per_tile = tileheight
+
+ if print3D_width_per_tile < 0 or print3D_height_per_tile < 0:
+ pr("no tilewidth or tileScale provided")
+ return
+ #endregion
+
print3D_width_total_mm = print3D_width_per_tile * num_tiles[0]
real_world_total_width_m = npim.shape[1] * cell_size_m
pr("source raster width", real_world_total_width_m, "m,", "cell size:", cell_size_m, "m, elev. min/max is", numpy.nanmin(npim), numpy.nanmax(npim), "m")
# What would be the 3D print resolution using the original/unresampled source resolution?
- source_print3D_resolution = (tilewidth*ntilesx) / float(npim.shape[1])
+ source_print3D_resolution = (print3D_width_per_tile*config.ntilesx) / float(npim.shape[1])
pr("source raster 3D print resolution would be", source_print3D_resolution, "mm")
# Resample raster to get requested printres?
- if printres <= 0: # use of source resolution was requested (typically set as -1)
+ if config.printres <= 0: # use of source resolution was requested (typically set as -1)
pr("no resampling, using source resolution of ", source_print3D_resolution, "mm for a total model width of", print3D_width_total_mm, "mm")
- if source_print3D_resolution < 0.2 and fileformat != "GeoTiff":
+ if source_print3D_resolution < 0.2 and config.fileformat != "GeoTiff":
pr("Warning: this print resolution of", source_print3D_resolution, "mm is pretty small for a typical nozzle size of 0.4 mm. You might want to use a printres that's just a bit smaller than your nozzle size ...")
print3D_resolution_mm = source_print3D_resolution
@@ -1388,10 +1493,10 @@ def get_GDAL_projection_and_datum(raster):
pr("Warning: will re-sample to a resolution finer than the original source raster. Consider instead a value for printres >", source_print3D_resolution)
# re-sample DEM (and bottom_elevation) using PIL
- pr("re-sampling", filename, ":\n ", npim.shape[::-1], source_print3D_resolution, "mm ", cell_size_m, "m ", numpy.nanmin(npim), "-", numpy.nanmax(npim), "m")
+ pr("re-sampling", importedDEM_filename, ":\n ", npim.shape[::-1], source_print3D_resolution, "mm ", cell_size_m, "m ", numpy.nanmin(npim), "-", numpy.nanmax(npim), "m")
npim = resampleDEM(npim, scale_factor)
- if bottom_elevation != None:
- pr("re-sampling", bottom_elevation, ":\n ", bot_npim.shape[::-1], source_print3D_resolution, "mm ", cell_size_m, "m ", numpy.nanmin(bot_npim), "-", numpy.nanmax(bot_npim), "m")
+ if config.bottom_elevation != None:
+ pr("re-sampling", config.bottom_elevation, ":\n ", bot_npim.shape[::-1], source_print3D_resolution, "mm ", cell_size_m, "m ", numpy.nanmin(bot_npim), "-", numpy.nanmax(bot_npim), "m")
bot_npim = resampleDEM(bot_npim, scale_factor)
# re-sample offset mask
@@ -1403,9 +1508,14 @@ def get_GDAL_projection_and_datum(raster):
# based on the full raster's shape and given the model width, recalc the model height
# and the adjusted printres that will give that width from the resampled raster
#
+
+
region_ratio = npim.shape[0] / float(npim.shape[1])
- print3D_width_per_tile = tilewidth # EW
+ print3D_width_per_tile = config.tilewidth # EW
print3D_height_per_tile = (print3D_width_per_tile * num_tiles[0] * region_ratio) / float(num_tiles[1]) # NS
+ if config.tileScale is not None:
+ print3D_width_per_tile, print3D_height_per_tile = get_print3D_dimensions(dem=dem, tile_scale=config.tileScale)
+
print3D_width_total_mm = print3D_width_per_tile * num_tiles[0] # width => EW
print3D_height_total_mm = print3D_width_total_mm * region_ratio # height => NS
adjusted_print3D_resolution = print3D_width_total_mm / float(npim.shape[1])
@@ -1419,12 +1529,21 @@ def get_GDAL_projection_and_datum(raster):
else:
pr("print res is", print3D_resolution_mm, "mm")
- DEM_title = filename[:filename.rfind('.')]
+ # Choose the export mesh filename: use config.DEM_name if it was explicitly set (i.e. differs from the default); otherwise fall back to the config filename, then to the imported DEM's filename.
+ if config.DEM_name == TouchTerrainConfig().DEM_name or config.DEM_name is None:
+ if config_filename is not None:
+ DEM_title = config_filename
+ else:
+ # fall back to imported DEM filename
+ DEM_title = importedDEM_filename[:importedDEM_filename.rfind('.')]
+ else:
+ DEM_title = config.DEM_name
# end of B: (local raster file)
+ #endregion
# Make empty zip file in temp_folder, add files into it later
total_size = 0 # size of stl/objs/geotiff file(s) in byes
- full_zip_file_name = temp_folder + os.sep + zip_file_name + ".zip"
+ full_zip_file_name = config.temp_folder + os.sep + config.zip_file_name + ".zip"
#print >> sys.stderr, "zip is in", os.path.abspath(full_zip_file_name)
zip_file = ZipFile(full_zip_file_name, "w", allowZip64=True) # create empty zipfile
@@ -1433,12 +1552,12 @@ def get_GDAL_projection_and_datum(raster):
# Create and store geometry (triangles)
#
- if fileformat != "GeoTiff": # Mesh export
+ if config.fileformat != "GeoTiff": # Mesh export
- if importedDEM == None:
- DEM_name = DEM_name.replace("/","-") # replace / with - to be safe
+ if config.importedDEM == None:
+ config.DEM_name = config.DEM_name.replace("/","-") # replace / with - to be safe
else:
- DEM_name = filename
+ config.DEM_name = importedDEM_filename
# Adjust raster to nice multiples of tiles. If needed, crop raster from right and bottom
remx = npim.shape[1] % num_tiles[0]
@@ -1449,7 +1568,7 @@ def get_GDAL_projection_and_datum(raster):
npim = npim[0:npim.shape[0] - remy, 0:npim.shape[1] - remx]
pr("cropped", old_shape[::-1], "to", npim.shape[::-1])
- if bottom_elevation != None:
+ if config.bottom_elevation != None:
bot_npim = bot_npim[0:bot_npim.shape[0]-remy, 0:bot_npim.shape[1]-remx]
pr("cropped bottom elevation raster to", bot_npim.shape[::-1])
@@ -1471,214 +1590,185 @@ def get_GDAL_projection_and_datum(raster):
#print (npim.shape[0] * cell_size_m) / (print3D_height_total_mm / 1000.0) # NS scale
# if scale X is negative, assume it means scale up to X mm high and calculate required z-scale for that height
- if zscale < 0:
+ if config.zscale < 0:
unscaled_elev_range_m = numpy.nanmax(npim) - numpy.nanmin(npim) # range at 1 x scale
scaled_elev_range_m = unscaled_elev_range_m / print3D_scale_number # convert range from real m to model/map m
- pos_zscale = -zscale
- requested_elev_range_m = -zscale / 1000 # requested range as m (given as mm)
- zscale = requested_elev_range_m / scaled_elev_range_m # z-scale needed to get to a model with the requested range
- pr("From requested model height of", pos_zscale, "mm, calculated a z-scale of", zscale)
+ pos_zscale = -config.zscale
+ requested_elev_range_m = -config.zscale / 1000 # requested range as m (given as mm)
+ config.zscale = requested_elev_range_m / scaled_elev_range_m # z-scale needed to get to a model with the requested range
+ pr("From requested model height of", pos_zscale, "mm, calculated a z-scale of", config.zscale)
# lower cells less/equal a certain elevation?
- if lower_leq is not None:
- assert len(lower_leq) == 2, \
- f"lower_leq should have the format [threshold, offset]. Got {lower_leq}"
+ if config.lower_leq is not None:
+ assert len(config.lower_leq) == 2, \
+ f"lower_leq should have the format [threshold, offset]. Got {config.lower_leq}"
#sf = (print3D_height_total_mm / 1000) / region_size_in_meters[1] # IdenC
#offset = (lower_leq[1] / 1000) / sf
- threshold = lower_leq[0]
- offset = lower_leq[1] / 1000 * print3D_scale_number # scale mm up to real world meters
- offset /= zscale # => unaffected by zscale
+ threshold = config.lower_leq[0]
+ offset = config.lower_leq[1] / 1000 * print3D_scale_number # scale mm up to real world meters
+ offset /= config.zscale # => unaffected by zscale
# Instead of lowering, shift elevations greater than the threshold up to avoid negatives
npim = numpy.where(npim > threshold, npim + offset, npim)
- pr("Lowering elevations <= ", threshold, " by ", offset, "m, equiv. to", lower_leq[1], "mm at map scale")
+ pr("Lowering elevations <= ", threshold, " by ", offset, "m, equiv. to", config.lower_leq[1], "mm at map scale")
# offset (lower) cells highlighted in the offset_masks files
- if offset_masks_lower is not None:
+ if config.offset_masks_lower is not None:
count = 0
for offset_layer in offset_npim:
- offset = offset_masks_lower[count][1] / 1000 * print3D_scale_number # scale mm up to real world meters
- offset /= zscale # account for zscale
+ offset = config.offset_masks_lower[count][1] / 1000 * print3D_scale_number # scale mm up to real world meters
+ offset /= config.zscale # account for zscale
# Invert the mask layer in order to raise all areas not previously masked.
# Subtracting elevation into negative values will cause an invalid STL to be generated.
offset_layer = numpy.where(offset_layer > 0, 0, 1)
offset_layer = numpy.multiply(offset_layer, 1 * offset)
npim = numpy.add(npim, offset_layer)
- pr("Offset masked elevations by raising all non masked areas of", offset_masks_lower[count][0],"by", offset, "m, equiv. to", offset_masks_lower[count][1], "mm at map scale")
+ pr("Offset masked elevations by raising all non masked areas of", config.offset_masks_lower[count][0],"by", offset, "m, equiv. to", config.offset_masks_lower[count][1], "mm at map scale")
npim = numpy.where(npim < 0, 0, npim)
count += 1
# fill (< 0 elevation) holes using a 3x3 footprint. Requires scipy.
# [0] is number of iterations, [1] is number of neighbors
- if fill_holes is not None and (fill_holes[0] > 0 or fill_holes[0] == -1):
- npim = fillHoles(npim, num_iters=fill_holes[0], num_neighbors=fill_holes[1])
-
- #
- # if we have a bottom elevation raster, do some checks and preparations
- # This part was originally in grid_tesselate.py and I'm too lasy to refactor its
- # variable names so I'll just do some aliasing here
- #
- np = numpy
- top = npim
- top_orig = None # maybe used later as backup if top gets NaN'd
- have_nan = np.any(np.isnan(npim)) # check if we have NaNs in the top raster
- throughwater = False # special flag for NaNs in bottom raster
-
- if bottom_elevation is not None:
- bottom = bot_npim
-
- # where top is actually lower than bottom (which can happen with Anson's data), set top to bottom
- top = np.where(top < bottom, bottom, top)
-
- # bool array with True where bottom has NaN values but top does not
- # this is specific to Anson's way of encoding through-water cells
- nan_values = np.logical_and(np.isnan(bottom), np.logical_not(np.isnan(top)))
- if np.any(nan_values) == True:
- bottom[nan_values] = 0 # set bottom NaN values to 0
- throughwater = True # flag for easy checking
-
- # if both have the same value (or very close to) set both to Nan
- # No relative tolerance here as we don't care about this concept here. Set the abs. tolerance to 0.001 m (1 mm)
- close_values = np.isclose(top, bottom, rtol=0, atol=0.001, equal_nan=False) # bool array
-
- # for any True values in array, set corresponding top and bottom cells to NaN
- # Also set NaN flags
- if np.any(close_values) == True:
- # save pre-dilated top for later dilation
- top_pre_dil = top.copy()
- top[close_values] = np.nan # set close values to NaN
-
- # if diagonal cleanup is requested, we need to do it again after setting NaNs
- #clean_up_diags_check(top)
-
- # save original top after setting NaNs so we can skip the undilated NaN cells later
- top_orig = top.copy()
- top = dilate_array(top, top_pre_dil) # dilate the NaN'd top with the original (pre NaN'd) top
-
- bottom[close_values] = np.nan # set close values to NaN
- #clean_up_diags_check(bottom) # re-check for diags
-
- if throughwater == True:
- bottom = dilate_array(bottom) # dilate with 3x3 nanmean #
- else:
- bottom = dilate_array(bottom, top_pre_dil) # dilate the NaN'd bottom with the original (pre NaN'd) top (same as original bottom)
-
- # pre-dilated top is not needed anymore
- del top_pre_dil
-
- # if we have no bottom but have NaNs in top, make a copy and 3x3 dilate it.
- # We'll still use the non-dilated top_orig when we need to skip NaN cells
- elif np.any(np.isnan(npim)):
- top_orig = top.copy() # save original top before it gets dilated
- top = dilate_array(top) # dilate with 3x3 nanmean
-
+ if config.fill_holes is not None and (config.fill_holes[0] > 0 or config.fill_holes[0] == -1):
+ npim = fillHoles(npim, num_iters=config.fill_holes[0], num_neighbors=config.fill_holes[1])
+
+ # TODO: We should fillHoles for the interpolation DEM in normal mode as well since the fillHole'd top right now is used as the top for a complementary difference mesh mode run of TouchTerrain. I haven't seen an actual difference show up yet in meshes.
+
+ # TODO: we can optimize fillHoles by returning the filled indices and reusing those precomputed indices for future filling
+
+ #for Difference Mesh mode
+ # bottom mesh must have holes filled to match what happened when the bottom raster was previously used for the interlocking top piece
+ # We need to fillHoles in both cases, whether bottom_thru_base is enabled or not.
+ # Not filling bottom holes in the bottom_thru_base=false case leads to mesh generation thinking that top=notNaN and bottom=NaN means the bottom should be forced to the base.
+ if config.bottom_elevation is not None:
+ bot_npim = fillHoles(bot_npim, num_iters=config.fill_holes[0], num_neighbors=config.fill_holes[1])
+ # AND/OR NaN out the top raster locations where the bottom raster is not NaN and does NOT equal the top raster
+ # In difference mesh and thru water, we could actually replace the close value NaN operation with a top NaN of all non NaN locations in the bottom raster. Because the thru case assumes that the bottom is the same as the top except for NaN spots.
+ # OR we just don't fill holes for thru water cases
+
+ top_raster_variants = RasterVariants(original=npim.copy(), nan_close=None, dilated=None, edge_interpolation=interp_npim)
+ bottom_raster_variants = RasterVariants(original=None, nan_close=None, dilated=None, edge_interpolation=None)
+ if config.bottom_elevation:
+ bottom_raster_variants.original = bot_npim.copy()
+
+ # Clip original elevation raster to clipping polygon before dilation in raster_preparation()
+ if config.edge_clipping_polygon:
+ #region Mark cells for polygon fitting
+ print('Finding polygon clipping edges')
+ find_polygon_clipping_edges(config=config, dem=dem, surface_raster_variant=[top_raster_variants, bottom_raster_variants], top_hint=top_elevation_hint_npim, print3D_resolution_mm=print3D_resolution_mm)
+ #endregion
+
+
+ if raster_preparation(top=top_raster_variants,
+ bottom=bottom_raster_variants, top_hint=top_elevation_hint_npim,
+ bottom_thru_base=config.bottom_thru_base,
+ bottom_floor_elev=(config.bottom_floor_elev if config.bottom_floor_elev is not None else config.min_elev-1)) is False or top_raster_variants.dilated is None:
+ return
+
+ # Use dilated elevation raster for wall marking to determine where the mesh ends for cells that are enclosed in the clipping raster and thus where some walls should be made
+ if config.edge_clipping_polygon:
+ #region Mark shared edges of the W and N neighbor of each cell for walls if needed
+ print('Marking shared edges for walls')
+ mark_shared_edges_for_walls(polygon_intersection_edge_buckets=top_raster_variants.polygon_intersection_edge_buckets, elevation_raster=top_raster_variants.dilated, direction=(-1, -1))
+ #endregion
+
+
+
+ # Debug: plot all polygon_intersection_geometry(s)
+ if False and top_raster_variants.polygon_intersection_geometry is not None:
+ all_polygon_intersection_geometries: list[shapely.Polygon] = []
+ for j in range(0,top_raster_variants.polygon_intersection_geometry.shape[0]):
+ for i in range(0,top_raster_variants.polygon_intersection_geometry.shape[1]):
+ if top_raster_variants.polygon_intersection_geometry[j][i] is not None:
+ all_polygon_intersection_geometries.extend(top_raster_variants.polygon_intersection_geometry[j][i])
+ if j == 1 and i == 5:
+ print(top_raster_variants.polygon_intersection_geometry[j][i])
+ plot_shapely_geometries_colormap(basePolys=top_raster_variants.polygon_intersection_geometry[j][i])
+ pass # add debug breakpoint here for inspection
+ plot_shapely_geometries_colormap(basePolys=all_polygon_intersection_geometries)
+
+
+
# repair these patterns, which cause non_manifold problems later:
# 0 1 or 1 0
# 1 0 or 0 1
- if clean_diags == True:
- npim = clean_up_diags(npim)
- if bottom_elevation != None:
- bot_npim = clean_up_diags(bot_npim)
- # TODO: check if this is needed as top NaNs dictate if a cell
- # should be skipped or not
-
+ if config.clean_diags:
+ top_raster_variants.apply_closure_to_variants(clean_up_diags)
+ bottom_raster_variants.apply_closure_to_variants(clean_up_diags)
+
#
# deal with min_elev and min_bottom_elev (and user set min_elev)
#
# set minimum elevation for top (will be used by all tiles)
- user_offset = 0 # no offset unless user specified min_elev
min_bottom_elev = None
- if min_elev != None: # user-given minimum elevation (via min_elev argument)
- if bottom_elevation != None: # have a bottom elevation
- min_bottom_elev = numpy.nanmin(bot_npim) #(actual min elev for all tiles)
- user_offset = numpy.nanmin(npim) - min_elev
- min_elev = numpy.nanmin(npim) #(actual min elev for all tiles)
+ if config.min_elev != None: # user-given minimum elevation (via min_elev argument)
+ if bottom_raster_variants.dilated is not None: # have a bottom elevation
+ min_bottom_elev = numpy.nanmin(bottom_raster_variants.dilated) #(actual minimum elev for bottom raster (and presumably all) tiles)
else: # no user-given min_elev
- min_elev = numpy.nanmin(npim)
- if bottom_elevation != None:
- min_bottom_elev = numpy.nanmin(bot_npim)
+ config.min_elev = numpy.nanmin(top_raster_variants.dilated)
+ if bottom_raster_variants.dilated != None:
+ min_bottom_elev = numpy.nanmin(bottom_raster_variants.dilated)
- print(f"elev min/max : {min_elev:.2f} to {numpy.nanmax(npim):.2f}")
- if bottom_elevation != None:
- print(f"bottom elev min/max : {numpy.nanmin(bot_npim):.2f} to {numpy.nanmax(bot_npim):.2f}")
+ print(f"elev min/max : {config.min_elev:.2f} to {numpy.nanmax(top_raster_variants.dilated):.2f}")
+ if bottom_raster_variants.dilated is not None:
+ print(f"bottom elev min/max : {numpy.nanmin(bottom_raster_variants.dilated):.2f} to {numpy.nanmax(bottom_raster_variants.dilated):.2f}")
#
# plot DEM and histogram, save as png
#
- plot_file_name = plot_DEM_histogram(npim, DEM_name, temp_folder)
- print(f"DEM plot and histogram saved as {plot_file_name}", file=sys.stderr)
-
- #
- # create tile info dict
- #
- tile_info = {
- "DEMname": DEM_name, # name of raster requested from earth eng.
- "bottom_elevation": bottom_elevation, # None or name of bottom elevation raster
- "crs" : crs_str, # cordinate reference system, can be EPSG code or UTM zone or any projection
- #"UTMzone" : utm_zone_str, # UTM zone e.g. UTM13N or
- "scale" : print3D_scale_number, # horizontal scale number, 1000 means 1:1000 => 1m in model = 1000m in reality
- "z_scale" : zscale, # z (vertical) scale (elevation exageration) factor
- "pixel_mm" : print3D_resolution_mm, # lateral (x/y) size of a 3D printed "pixel" in mm
- "min_elev" : min_elev, # needed for multi-tile models
- "min_bot_elev" : min_bottom_elev, # needed for multi-tile models
- "user_offset": user_offset, # offset between user given min_elev and actual data min_elev
- "base_thickness_mm" : basethick,
- "bottom_relief_mm": 1.0, # thickness of the bottom relief image (float), must be less than base_thickness
- "folder_name": DEM_title, # folder/zip file name for all tiles
- "tile_centered" : tile_centered, # True: each tile's center is 0/0, False: global (all-tile) 0/0
- "tile_no_x": -1, # current(!) tile number along x
- "tile_no_y": -1, # current(!) tile number along y
- "tile_width": print3D_width_per_tile, # in mmm
- "tile_height": print3D_height_per_tile, # in mmm
- "full_raster_width": -1, # in pixels
- "full_raster_height": -1,
- "fileformat": fileformat,
- "temp_file": None,
- "no_bottom": no_bottom, # omit bottom triangles?
- "bottom_image": bottom_image, # None or name of bottom image file (for relief)
- "ntilesy": ntilesy, # number of tiles in y, ntilesx is not needed here
- "only": only, # if nont None, process only this tile e.g. [1,2]
- "no_normals": no_normals, # calculate normals?
- "geo_transform": geo_transform, # GeoTransform of geotiff
- "use_geo_coords": use_geo_coords, # create STL coords in UTM: None, "centered" or "UTM"
- "smooth_borders": smooth_borders, # optimize borders?
- "clean_diags": clean_diags, # remove diagonal patterns?
- "dirty_triangles": dirty_triangles, # allow creating of better fitting but potentiall degenerate triangles
- "throughwater": throughwater, # special flag for NaNs in bottom raster
- }
+ #plot_file_name = plot_DEM_histogram(top_raster_variants.dilated, config.DEM_name, config.temp_folder)
+ #print(f"DEM plot and histogram saved as {plot_file_name}", file=sys.stderr)
+
+ tile_info = TouchTerrainTileInfo(config=config)
+ tile_info.crs = crs_str
+ tile_info.scale = print3D_scale_number
+ tile_info.pixel_mm = print3D_resolution_mm
+ tile_info.min_bot_elev = min_bottom_elev
+ tile_info.folder_name = DEM_title
+ tile_info.tile_width = print3D_width_per_tile
+ tile_info.tile_height = print3D_height_per_tile
+ tile_info.geo_transform = geo_transform
#
# Make tiles (subsets) of the full raster and generate 3D grid model
#
# num_tiles[0], num_tiles[1]: x, y !
- cells_per_tile_x = int(npim.shape[1] / num_tiles[0]) # tile size in pixels
- cells_per_tile_y = int(npim.shape[0] / num_tiles[1])
+ cells_per_tile_x = int(top_raster_variants.dilated.shape[1] / num_tiles[0]) # tile size in pixels
+ cells_per_tile_y = int(top_raster_variants.dilated.shape[0] / num_tiles[1])
pr("Cells per tile (x/y)", cells_per_tile_x, "x", cells_per_tile_y)
-
- # pad full rasters(s) by one at the fringes
- npim = numpy.pad(npim, (1,1), 'edge') # will duplicate edges, including nan
- if bottom_elevation != None:
- bot_npim = numpy.pad(bot_npim, (1,1), 'edge')
- if top_orig is not None:
- top_orig = numpy.pad(top_orig, (1,1), 'edge')
+ #region pad full rasters(s) by one at the fringes
+ # top_raster_variants.dilated = numpy.pad(top_raster_variants.dilated, (1,1), 'edge') # will duplicate edges, including nan
+ # if bottom_raster_variants.dilated is not None:
+ # bottom_raster_variants.dilated = numpy.pad(bottom_raster_variants.dilated, (1,1), 'edge')
+ # if bottom_raster_variants.original is not None:
+ # bottom_raster_variants.original = numpy.pad(bottom_raster_variants.original, (1,1), 'edge')
+ # if top_raster_variants.original is not None:
+ # top_raster_variants.original = numpy.pad(top_raster_variants.original, (1,1), 'edge')
+ # if top_raster_variants.nan_close is not None:
+ # top_raster_variants.nan_close = numpy.pad(top_raster_variants.nan_close, (1,1), 'edge')
+ def pad_1x1(r: numpy.ndarray):
+ return numpy.pad(r, (1,1), 'edge')
+ top_raster_variants.apply_closure_to_variants(pad_1x1)
+ bottom_raster_variants.apply_closure_to_variants(pad_1x1)
+ #endregion
# store size of full raster
- tile_info["full_raster_height"], tile_info["full_raster_width"] = npim.shape
+ tile_info.full_raster_height, tile_info.full_raster_width = top_raster_variants.dilated.shape
# Warn that we're only processing one tile
- process_only = tile_info["only"]
+ process_only = tile_info.config.only
if process_only != None:
pr("Only processing tile:", process_only)
- CPU_cores_to_use = 1 # set to SP
+ config.CPU_cores_to_use = 1 # set to SP
# within the padded full raster, grab tiles - but each with a 1 cell fringe!
- tile_list = [] # list of tiles to be processed via multiprocessing.map()
+ tile_list: list[ProcessingTile] = [] # list of tiles to be processed via multiprocessing.map()
for tx in range(num_tiles[0]):
for ty in range(num_tiles[1]):
#print tx,ty
@@ -1689,53 +1779,46 @@ def get_GDAL_projection_and_datum(raster):
start_y = ty * cells_per_tile_y
end_y = start_y + cells_per_tile_y + 1 + 1
- tile_elev_raster = npim[start_y:end_y, start_x:end_x] # [y,x]
+ #tile_elev_raster = top_raster_variants.dilated[start_y:end_y, start_x:end_x].copy() # [y,x]
#print tile_elev_raster.astype(int)
# Jan 2019: for some reason, changing one tile's raster in process_tile also changes parts of another
# tile's raster (???) So I'm making the elev arrays r/o here and make a copy in process_raster
- tile_elev_raster.flags.writeable = False
-
-
- if bottom_elevation != None :
- tile_bot_elev_raster = bot_npim[start_y:end_y, start_x:end_x] # [y,x]
- tile_bot_elev_raster.flags.writeable = False
- else:
- tile_bot_elev_raster = None
-
- tile_elev_orig_raster = None
- if top_orig is not None:
- tile_elev_orig_raster = top_orig[start_y:end_y, start_x:end_x]
- tile_elev_orig_raster.flags.writeable = False
-
+ #tile_elev_raster.flags.writeable = False
+
+ tile_top_raster_variants = top_raster_variants.copy_tile_raster_variants(start_y, end_y, start_x, end_x)
+
+ tile_bottom_raster_variants = bottom_raster_variants.copy_tile_raster_variants(start_y, end_y, start_x, end_x)
+
# add to tile_list
- tile_info["tile_no_x"] = tx + 1
- tile_info["tile_no_y"] = ty + 1
- my_tile_info = tile_info.copy() # make a copy of the global info, so we can store tile specific info in during processing
+ tile_info.tile_no_x = tx + 1
+ tile_info.tile_no_y = ty + 1
+ my_tile_info = copy.copy(tile_info) # make a copy of the global info, so we can store tile specific info in during processing
# if raster is too large, use temp files to create the tile STL/obj files
- if tile_info["full_raster_height"] * tile_info["full_raster_width"] > max_cells_for_memory_only:
+ if tile_info.full_raster_height * tile_info.full_raster_width > config.max_cells_for_memory_only:
# use a temp file in local tmp folder
# Note: yes, I tried using a named tempfile, which works nicely except for MP and it's too hard to figure out the issue with MP
- mytempfname = f"{temp_folder}{os.sep}{zip_file_name}{tile_info['tile_no_x']}{tile_info['tile_no_y']}.tmp"
+ mytempfname = f"{config.temp_folder}{os.sep}{config.zip_file_name}{tile_info.tile_no_x}{tile_info.tile_no_y}.tmp"
# store temp file names (not file objects), MP will create file objects during processing
- my_tile_info["temp_file"] = mytempfname
+ my_tile_info.temp_file = mytempfname
# assemble tile to be processed
- tile = (my_tile_info, tile_elev_raster, tile_bot_elev_raster, tile_elev_orig_raster) # leave it to process_tile() to unwrap the info and data parts
+ #tile = (my_tile_info, tile_elev_raster, tile_bot_elev_raster, tile_elev_pre_dil_raster, tile_elev_orig_full_raster) # leave it to process_tile() to unwrap the info and data parts
+ tile = ProcessingTile(tile_info=my_tile_info, top=tile_top_raster_variants, bottom=tile_bottom_raster_variants)
# if we only process one tile ...
if process_only == None: # "only" parameter was not given
tile_list.append(tile)
else:
- if process_only[0] == tile_info['tile_no_x'] and process_only[1] == tile_info['tile_no_y']:
+ if process_only[0] == tile_info.tile_no_x and process_only[1] == tile_info.tile_no_y:
tile_list.append(tile) # got the only tile
else:
- print("process only is:", process_only, ", skipping tile", tile_info['tile_no_x'], tile_info['tile_no_y'])
+ print("process only is:", process_only, ", skipping tile", tile_info.tile_no_x, tile_info.tile_no_y)
- if tile_info["full_raster_height"] * tile_info["full_raster_width"] > max_cells_for_memory_only:
- logger.debug("tempfile or memory? number of pixels:" + str(tile_info["full_raster_height"] * tile_info["full_raster_width"]) + ">" + str(max_cells_for_memory_only) + " => using temp file")
+ if tile_info.full_raster_height * tile_info.full_raster_width > config.max_cells_for_memory_only:
+ logger.debug("tempfile or memory? number of pixels:" + str(tile_info.full_raster_height * tile_info.full_raster_width) + ">" + str(config.max_cells_for_memory_only) + " => using temp file")
# single core processing: just work on the list sequentially, don't use multi-core processing.
@@ -1744,7 +1827,7 @@ def get_GDAL_projection_and_datum(raster):
# "temp_file" is None, we got a buffer, but if "temp_file" is a string, we got a file of that name
# [1] can either be the buffer or again the name of the temp file we just wrote (which is redundant, i know ...)
# None means no MP
- if num_tiles[0] * num_tiles[1] == 1 or CPU_cores_to_use == 1 or CPU_cores_to_use == None:
+ if num_tiles[0] * num_tiles[1] == 1 or config.CPU_cores_to_use == 1 or config.CPU_cores_to_use == None:
pr("using single-core only (multi-core is currently broken :(")
processed_list = []
# Convert each tile into a list: [0]: updated tile info, [1,2,3]: rasters (or None)
@@ -1757,20 +1840,20 @@ def get_GDAL_projection_and_datum(raster):
processed_list.append(pt) # append to list of processed tiles
else:
try: # delete temp file b/c it's only a STLb header from a tile with no elevations
- os.remove(pt[0]["temp_file"])
+ os.remove(pt[0].temp_file)
except Exception as e:
- logger.error("Error removing" + str(pt[0]["temp_file"]) + " " + str(e))
+ logger.error("Error removing " + str(pt[0].temp_file) + " " + str(e))
# use multi-core processing
else:
#if CPU_cores_to_use is 0(!) us all cores, otherwise use that number
- if CPU_cores_to_use == 0:
+ if config.CPU_cores_to_use == 0:
num_cores = None
num_core_str = "all"
else:
- num_cores = CPU_cores_to_use
+ num_cores = config.CPU_cores_to_use
num_core_str = str(num_cores)
# TODO: Using 0 here that needs to become None is confusing, but too esoteric to clean up ..
# Better: make default 1, else use MP with None (meaning all)
@@ -1802,7 +1885,7 @@ def get_GDAL_projection_and_datum(raster):
# the tile width/height was written into tileinfo during processing
- pr(f"\n{num_tiles[0]} x {num_tiles[1]} tiles, tile size {tile_info['tile_width']:.2f} x {tile_info['tile_height']:.2f} mm\n")
+ pr(f"\n{num_tiles[0]} x {num_tiles[1]} tiles, tile size {tile_info.tile_width:.2f} x {tile_info.tile_height:.2f} mm\n")
# delete tile list, as the elevation arrays are no longer needed
del tile_list
@@ -1815,11 +1898,14 @@ def get_GDAL_projection_and_datum(raster):
#print "start of putting tiles into zip file")
for p in processed_list:
tile_info = p[0] # per-tile info
- tile_name = f"{DEM_title}_tile_{tile_info['tile_no_x']}_{tile_info['tile_no_y']}.{fileformat[:3]}" # name of file inside zip
+
+ tile_label = f"_tile_{tile_info.tile_no_x}_{tile_info.tile_no_y}" if len(processed_list) > 1 else ""
+
+ tile_name = f"{DEM_title}{tile_label}.{config.fileformat[:3]}" # name of file inside zip
buf= p[1] # either a string or a file object
- if tile_info.get("temp_file") != None: # if buf is a file
- fname = tile_info["temp_file"]
+ if tile_info.temp_file != None: # if buf is a file
+ fname = tile_info.temp_file
stl_list = add_to_stl_list(fname, stl_list)
zip_file.write(fname , tile_name) # write temp file into zip
else:
@@ -1827,12 +1913,12 @@ def get_GDAL_projection_and_datum(raster):
stl_list = add_to_stl_list(buf, stl_list)
- total_size += tile_info["file_size"]
- logger.debug("adding tile %d %d, total size is %d" % (tile_info["tile_no_x"],tile_info["tile_no_y"], total_size))
+ total_size += tile_info.file_size
+ logger.debug("adding tile %d %d, total size is %d" % (tile_info.tile_no_x,tile_info.tile_no_y, total_size))
# print size and elev range
- pr("tile", tile_info["tile_no_x"], tile_info["tile_no_y"], ": height: ", tile_info["min_elev"], "-", tile_info["max_elev"], "mm",
- ", file size:", round(tile_info["file_size"]), "Mb")
+ pr("tile", tile_info.tile_no_x, tile_info.tile_no_y, ": height: ", tile_info.config.min_elev, "-", tile_info.max_elev, "mm",
+ ", file size:", round(tile_info.file_size), "Mb")
pr("\ntotal size for all tiles:", round(total_size, 1), "Mb")
@@ -1843,17 +1929,17 @@ def get_GDAL_projection_and_datum(raster):
dem = None # Python GDAL's way of closing/freeing the raster, needed to be able to delete the inital geotiff
# clean up data for offset_masks
- if offset_masks_lower is not None:
+ if config.offset_masks_lower is not None:
for offset_layer in offset_npim:
del offset_layer
# make k3d render
- if kd3_render == True and (fileformat == "STLa" or fileformat == "STLb"):
- if tile_info.get("temp_file") != None:
- html_file = k3d_render_to_html(stl_list, temp_folder, buffer=False)
+ if config.kd3_render == True and (config.fileformat == "STLa" or config.fileformat == "STLb"):
+ if tile_info.temp_file != None:
+ html_file = k3d_render_to_html(stl_list, config.temp_folder, buffer=False)
else:
- html_file = k3d_render_to_html(stl_list, temp_folder, buffer=True)
+ html_file = k3d_render_to_html(stl_list, config.temp_folder, buffer=True)
zip_file.write(html_file, "k3d_plot.html") # write into zip
@@ -1861,7 +1947,7 @@ def get_GDAL_projection_and_datum(raster):
for p in processed_list:
tile_info = p[0]
buf= p[1]
- if tile_info.get("temp_file") != None:
+ if tile_info.temp_file != None:
try:
os.remove(fname) # on windows remove closed file manually
except Exception as e:
@@ -1875,18 +1961,18 @@ def get_GDAL_projection_and_datum(raster):
print("zip finished:", datetime.datetime.now().time().isoformat())
# for mesh output add (full) geotiff we got from EE to zip
- if importedDEM == None:
+ if config.importedDEM == None:
total_size += os.path.getsize(GEE_dem_filename) / 1048576
zip_file.write(GEE_dem_filename, DEM_title + ".tif")
pr("added full geotiff as " + DEM_title + ".tif")
- if fileformat != "GeoTiff": # for now only for mesh output
+ if config.fileformat != "GeoTiff": # for now only for mesh output
zip_file.write(plot_file_name, DEM_title + "_DEMandHistogram.png")
pr("added histogram of elevation values as " + DEM_title + "_DEMandHistogram.png")
# add png from Google Maps static (ISU server doesn't use that b/c it eats too much into our free google maps allowance ...)
- if map_img_filename != None:
- zip_file.write(map_img_filename, DEM_title + ".jpg")
+ if config.map_img_filename != None:
+ zip_file.write(config.map_img_filename, DEM_title + ".jpg")
pr("added map of area as " + DEM_title + ".jpg")
pr("\nprocessing finished: " + datetime.datetime.now().time().isoformat())
@@ -1900,7 +1986,7 @@ def get_GDAL_projection_and_datum(raster):
zip_file.close() # flushes zip file
# remove geotiff d/led from EE
- if importedDEM == None:
+ if config.importedDEM == None:
try:
os.remove(GEE_dem_filename)
except Exception as e:
@@ -1914,18 +2000,18 @@ def get_GDAL_projection_and_datum(raster):
print("Error removing logfile " + str(log_file_name) + " " + str(e), file=sys.stderr)
# remove map image
- if map_img_filename != None:
+ if config.map_img_filename != None:
try:
- os.remove(map_img_filename)
+ os.remove(config.map_img_filename)
except Exception as e:
- print("Error removing map image" + str(map_img_filename) + " " + str(e), file=sys.stderr)
+ print("Error removing map image" + str(config.map_img_filename) + " " + str(e), file=sys.stderr)
# remove plot+histo file
- if fileformat != "GeoTiff":
- try:
- os.remove(plot_file_name)
- except Exception as e:
- print("Error removing plot_with_histogram.png " + str(plot_file_name) + " " + str(e), file=sys.stderr)
+ # if config.fileformat != "GeoTiff":
+ # try:
+ # os.remove(plot_file_name)
+ # except Exception as e:
+ # print("Error removing plot_with_histogram.png " + str(plot_file_name) + " " + str(e), file=sys.stderr)
# return total size in Mega bytes and location of zip file
return total_size, full_zip_file_name
diff --git a/touchterrain/common/Vertex.py b/touchterrain/common/Vertex.py
new file mode 100644
index 00000000..a8b5e538
--- /dev/null
+++ b/touchterrain/common/Vertex.py
@@ -0,0 +1,60 @@
+class vertex:
+
+ # dict of index value for each vertex
+ # key is tuple of coordinates, value is a unique index
+ vertex_index_dict = -1
+
+ coords: tuple[float, ...]
+
+ def __init__(self, x,y,z):
+ self.coords = tuple([float(d) for d in (x,y,z)]) # made this a tuple (zigzag won't work with this anymore but it's not used anyway ...)
+ vdict = vertex.vertex_index_dict # class attribute
+
+ # for non obj file this is set to -1, and there's no need to deal with vertex indices
+ if vdict != -1:
+ # This creates a dict (a grid class attribute) with a tuple of the
+ # 3 coords as key and an int as value. The int is a running index, i.e. for each new
+ # (not yet hashed) vertex this index just increases by 1, based on the current number of dict
+ # entries. If a vertex has coords that already exist in the dict, nothing needs to be done.
+ # This ensures that each index number is unique but can be shared by multiple vertices
+ # (e.g. when 2 triangles have vertices at exactly the same coords)
+ # as it's easy to look up the index based on self.coords, a vertex does not actually
+ # have to store its index.
+
+ # if we don't have an index value for these coords (as key)
+ if self.coords not in vdict: # can't hash list
+ vdict[self.coords] = len(vdict) # and set next running index as new value for key
+ #print(self.coords, "now has idx", self.vert_idx) # DEBUG
+ else: # this vertex has an idx in vdict
+ #print(self.coords, "already has idx", vdict[tuple(self.coords)]) # DEBUG
+ pass
+
+ def get_id(self):
+ '''return Id for my coords'''
+ return vertex.vertex_index_dict[self.coords]
+
+ def get(self):
+ "returns [x,y,z] list of vertices"
+ return self.coords
+
+ def __str__(self):
+ return "%.2f %.2f %.2f " % (self.coords[0], self.coords[1], self.coords[2])
+
+ def __getitem__(self, index):
+ "enables use of index brackets for vertex objects: v[0] returns coords[0]"
+ return self.coords[index]
+
+ def vertex_rounded_to_precision(self, decimals: int) -> "vertex":
+ def round_float_to_precision(decimals: int, input: float):
+ return round(input, decimals)
+
+ intermediate_list = []
+
+ for c in self.coords:
+ intermediate_list.append(round_float_to_precision(decimals=decimals, input=c))
+
+ if len(intermediate_list) == 3:
+ return vertex(*intermediate_list)
+ else:
+ raise ValueError(f"len(intermediate_list) was not 3, got {len(intermediate_list)}")
+
diff --git a/touchterrain/common/grid_tesselate.py b/touchterrain/common/grid_tesselate.py
index 738942e1..11cf0fac 100644
--- a/touchterrain/common/grid_tesselate.py
+++ b/touchterrain/common/grid_tesselate.py
@@ -28,24 +28,38 @@
# multiple grids are processed together.
# CH July 2015
-import numpy as np
+import io
import warnings # for muting warnings about nan in e.g. nanmean()
-import struct # for making binary STL
-import sys
import multiprocessing
-import io
import os
import shutil
+import struct # for making binary STL
+import sys
# get root logger, will later be redirected into a logfile
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
+from typing import Union, Any, Callable
+
+import numpy as np
+import shapely
+
+from touchterrain.common.Vertex import vertex
+from touchterrain.common.Quad import quad
+
from touchterrain.common.vectors import Vector, Point # local copy of vectors package which was no longer working in python 3
import touchterrain.common.utils as utils
+from touchterrain.common.tile_info import TouchTerrainTileInfo
+from touchterrain.common.RasterVariants import RasterVariants
+from touchterrain.common.BorderEdge import BorderEdge
+
+from touchterrain.common.shapely_utils import flatten_geometries
+from touchterrain.common.shapely_polygon_utils import polygon_to_list_of_vertex, polygons_equal_3d
+from touchterrain.common.interpolate_Z import interpolate_z_planar
# function to calculate the normal for a triangle
def get_normal(tri):
@@ -68,235 +82,31 @@ def get_normal(tri):
normal = [c.x/m, c.y/m, c.z/m]
return normal
-
-class vertex:
-
- # dict of index value for each vertex
- # key is tuple of coordinates, value is a unique index
- vertex_index_dict = -1
-
- def __init__(self, x,y,z):
- self.coords = tuple([float(d) for d in (x,y,z)]) # made this a tuple (zigzag won't work wth this anymore but it's not used anyway ...)
- vdict = vertex.vertex_index_dict # class attribute
-
- # for non obj file this is set to -1, and there's no need to deal with vertex indices
- if vdict != -1:
- # This creates a dict (a grid class attribute) with a tuple of the
- # 3 coords as key and a int as value. The int is a running index i.e. for each new
- # (not yet hashed) vertex this index just increases by 1, based on the current number of dict
- # entries. If a vertex has coords that already exist in the dict, nothing needs to be done.
- # This ensures that each index number is unique but can be shared by multiple indices
- # (e.g. when 2 triangles have vertices at exactly the same coords)
- # as it's easy to look up the index based on self.coords, a vertex does not actually
- # have to store its index.
-
- # if we don't have an index value for these coords (as key)
- if self.coords not in vdict: # can't hash list
- vdict[self.coords] = len(vdict) # and set next running index as new value for key
- #print(self.coords, "now has idx", self.vert_idx) # DEBUG
- else: # this vertex has an idx in vdict
- #print(self.coords, "already has idx", vdict[tuple(self.coords)]) # DEBUG
- pass
-
- def get_id(self):
- '''return Id for my coords'''
- return vertex.vertex_index_dict[self.coords]
-
- def get(self):
- "returns [x,y,z] list of vertices"
- return self.coords
-
- def __str__(self):
- return "%.2f %.2f %.2f " % (self.coords[0], self.coords[1], self.coords[2])
-
- def __getitem__(self, index):
- "enables use of index brackets for vertex objects: v[0] returns coords[0]"
- return self.coords[index]
-
-
-
-
-class quad:
- """return list of 2 triangles (counterclockwise) per quad
- wall quads will NOT subdivide their quad into subquads if they are too skinny
- as this would require to re-index the entire mesh. However, I left the subdive
- stuff in in case we want to re-visit it later.
- """
- # class attribute, use quad.too_skinny_ratio
- too_skinny_ratio = 0.1 # border quads with a horizontal vs vertical ratio smaller than this will be subdivided
-
- # order is NE, NW, SW, SE
- # can be just a triangle, if it just any 3 ccw consecutive corners
- def __init__(self, v0, v1, v2, v3=None):
- self.vl = [v0, v1, v2, v3]
- self.subdivide_by = None # if not None, we need to subdivide the quad into that many subquads
-
- def get_copy(self):
- ''' returns a copy of the quad'''
- vl = self.vl[:]
- cp = quad(vl[0], vl[1], vl[2], vl[3])
- return cp
-
- def check_if_too_skinny(self, direction):
- '''if a border quad is too skinny it will to be subdivided into multiple quads'''
- #print direction, [str(v) for v in self.vl]
-
- # order of verts will be different for N,S vs E,W walls!
- if direction in ("S", "N"): # '-49.50 49.50 0.00 ', '-49.50 49.50 10.00 ', '-50.00 49.50 10.00 ', '-50.00 49.50 0.00 '
- horz_dist = abs(self.vl[0][0] - self.vl[2][0]) # x diff of v0 and v2
- max_elev = max(self.vl[1][2], self.vl[2][2]) # max elevation of v1 vs v2
- min_elev = min(self.vl[0][2], self.vl[3][2]) # min elevation v0 vs v3
- vert_dist = max_elev - min_elev # z diff of v0 and v1
- else: # -49.50 50.00 10.00 ', '-49.50 49.50 10.00 ', '-49.50 49.50 0.00 ', '-49.50 50.00 0.00 '
- horz_dist = abs(self.vl[0][1] - self.vl[1][1]) # y diff of v0 and v1
- max_elev = max(self.vl[0][2], self.vl[1][2]) # max elevation of v0 vs v1
- min_elev = min(self.vl[2][2], self.vl[3][2]) # min elevation v2 vs v3
- vert_dist = max_elev - min_elev # z diff of v0 and v1
- if vert_dist == 0: return # walls can be 0 height
-
- ratio = horz_dist / float (vert_dist)
- #print ratio, quad.too_skinny_ratio, quad.too_skinny_ratio / ratio
- if ratio < quad.too_skinny_ratio:
- sb = int(quad.too_skinny_ratio / ratio)
- self.subdivide_by = sb
-
- def get_triangles(self):
- "return list of 2 triangles (counterclockwise)"
- v0,v1,v2,v3 = self.vl[0],self.vl[1],self.vl[2],self.vl[3]
- t0 = (v0, v1, v2) # verts of first triangle
-
- # if v3 is None, we only return t0
- if v3 != None:
- t1 = (v0, v2, v3) # verts of second triangle
- return (t0,t1)
- else:
- return(t0, None)
-
- # this isn't used anymore
- def get_triangles_with_indexed_verts(self):
- "return list of 2 triangles (counterclockwise) as vertex indices"
-
- vertidx = [] # list of the 4 verts as index
- for v in self.vl: # quad as list of 4 verts, each as (x,y,z)
- if v != None: # v3 could be None
- vi = v.get_id()
- vertidx.append(vi)
- #print v,vi
-
- t0 = (vertidx[0], vertidx[1], vertidx[2]) # verts of first triangle
- # if v3 is None(i.e. we didn't get a 4. index), we only return t0
- if len(vertidx) > 3:
- t1 = (vertidx[0], vertidx[2], vertidx[3]) # verts of second triangle
- return (t0,t1)
- else:
- return(t0, None)
-
- '''
- # splits skinny triangles
- def get_triangles(self, direction=None):
- """return list of 2 triangles (counterclockwise) per quad
- wall quads will subdivide their quad into subquads if they are too skinny
- """
- v0,v1,v2,v3 = self.vl[0],self.vl[1],self.vl[2],self.vl[3]
-
- # do we need to subdivide?
- if self.subdivide_by is None: # no, either not a wall or a chunky wall
- t0 = (v0, v1, v2) # verts of first triangle
- t1 = (v0, v2, v3) # verts of second triangle
- return (t0,t1)
-
- else:
- # subdivde into sub quads and return their triangles
-
- # order of verts will be different for N,S vs E,W walls!
- if direction in ("S", "N"): # '-49.50 49.50 0.00 ', '-49.50 49.50 10.00 ', '-50.00 49.50 10.00 ', '-50.00 49.50 0.00 '
- horz_dist = abs(self.vl[0][0] - self.vl[2][0]) # x diff of v0 and v2
- max_elev = max(self.vl[1][2], self.vl[2][2]) # max elevation of v1 vs v2
- min_elev = min(self.vl[0][2], self.vl[3][2]) # min elevation v0 vs v3
- vert_dist = max_elev - min_elev # z diff of v0 and v1
- else: # -49.50 50.00 10.00 ', '-49.50 49.50 10.00 ', '-49.50 49.50 0.00 ', '-49.50 50.00 0.00 '
- horz_dist = abs(self.vl[0][1] - self.vl[1][1]) # y diff of v0 and v1
- max_elev = max(self.vl[0][2], self.vl[1][2]) # max elevation of v0 vs v1
- min_elev = min(self.vl[2][2], self.vl[3][2]) # min elevation v2 vs v3
- vert_dist = max_elev - min_elev # z diff of v0 and v1
-
-
-
- tri_list = []
-
- # for finding the height of the sub quads I don't care about the different vert order
- z_list =[v[2] for v in self.vl]
- z_top = max(z_list) # z height of the top (take min() b/c one might be higher)
- z_bot = min(z_list) # z height at bottom
- z_dist = z_top - z_bot # distance to be
-
- #self.subdivide_by = 3 # DEBUG
-
- qheight = z_dist / float(self.subdivide_by) # height (elevation dist) of each quad
- height_list = [ z_top - qheight * i for i in range(self.subdivide_by+1) ] # list of h
-
- # make new subquads and return a list of their triangles
- vl_copy = copy.deepcopy(self.vl) # must make a deep copy, otherwise changing the subquads affect the current quad
- tl = [] # triangle list
-
- bottom_height_list = height_list[1:]
- for n,_ in enumerate(bottom_height_list):
- v0_,v1_,v2_,v3_ = vl_copy[0], vl_copy[1], vl_copy[2],vl_copy[3] # unroll copy
- #print n,v0_,v1_,v2_,v3_
-
- # as order of verts will be different for N,S vs E,W walls we need 2 different cases
- if direction in ("N", "S"):
- top_inds = (1,2)
- bot_inds = (0,3)
- else:
- top_inds = (0,1)
- bot_inds = (2,3)
-
-
- # top verts
- if n > 0: # don't change top z for topmost sub quad
- h = height_list[n]
- v= vl_copy[top_inds[0]] # first vertex of subquad
- v.coords[2] = h # set its z value
- v= vl_copy[top_inds[1]]
- v.coords[2] = h
-
- # bottom verts
- if n < len(bottom_height_list): # don't change bottom z for bottommost sub quad
- h = height_list[n+1]
- v = vl_copy[bot_inds[0]]
- v.coords[2] = h
- v = vl_copy[bot_inds[1]]
- v.coords[2] = h
-
- # make a sub quad
- sq = copy.deepcopy(quad(vl_copy[0], vl_copy[1], vl_copy[2],vl_copy[3])) # each subquad needs to be its own copy
- #print n, sq,
-
- t0,t1 = sq.get_triangles()
- tl.append(t0)
- tl.append(t1)
-
- return tl
- '''
-
- def __str__(self):
- rs =" "
- for n,v in enumerate(self.vl):
- rs = rs + "v" + str(n) + ": " + str(v) + " "
- return rs
-
-
class cell:
'''a cell with a top and bottom quad, constructor: uses refs and does NOT copy ...
except for triangle cells
'''
+ topquad: quad # 4 corner square quad with X,Y,Z
+ bottomquad: quad | None
+ borders: dict[str, quad]
+
+ topSurfacePolygons: list[shapely.Polygon] | None = None
+ "list of polygons (preferably tris) with X,Y,Z to use for the mesh instead of the topquad"
+ bottomSurfacePolygons: list[shapely.Polygon] | None = None
+ "list of polygons (preferably tris) with X,Y,Z to use for the mesh instead of the bottomquad"
+ surfacePolygonBorders: list[quad] | None = None
+ # surface polygon borders should be generated using raster polygon edge buckets BorderEdge wall value
+
def __init__(self, topquad, bottomquad, borders, is_tri_cell=False):
self.topquad = topquad
self.bottomquad = bottomquad
self.borders = borders
self.is_tri_cell = is_tri_cell
+ # Debug: keep the original quads to see what the cell functions changed
+ # self.topquadoriginal = topquad
+ # self.bottomquadtoriginal = bottomquad
+
def __str__(self):
r = hex(id(self)) + "\n top:" + str(self.topquad) + "\n btm:" + str(self.bottomquad) + "\n borders:\n"
for d in ["N", "S", "E", "W"]:
@@ -304,6 +114,30 @@ def __str__(self):
r = r + " " + d + ": " + str(self.borders[d]) + "\n"
return r
+ def meshes_for_model(self) -> list[Union[quad, shapely.Polygon]]:
+        """Returns a list of all the meshes to actually include in the output model for this cell. Meshes are in the form of a quad or shapely.Polygon (triangulated)"""
+ meshes = []
+ if self.topSurfacePolygons:
+ meshes.extend(self.topSurfacePolygons)
+ elif self.topquad:
+ meshes.append(self.topquad)
+ # if we use topquad, we also use cardinal direction borders
+ for k in self.borders: # k is N, S, E, W
+ if self.borders[k] is not False: meshes.append(self.borders[k])
+ # else:
+ # It is possible to have a cell with no top quad or topSurfacePolygon because all volumes in the cell were removed in zero volume check
+ # raise AttributeError("cell has no top quad or topSurfacePolygons")
+
+ if self.bottomSurfacePolygons:
+ meshes.extend(self.bottomSurfacePolygons)
+ elif self.bottomquad:
+ meshes.append(self.bottomquad)
+
+ if self.surfacePolygonBorders:
+ meshes.extend(self.surfacePolygonBorders)
+
+ return meshes
+
def check_for_tri_cell(self):
"""Returns True if cell has borders on 2 consecutive sides False otherwise.
Returns False is cell is already a tri-cell"""
@@ -367,7 +201,79 @@ def convert_to_tri_cell(self):
self.is_tri_cell = True
return None
+
+ def remove_zero_height_volumes(self):
+ """Remove volumes that should have zero height due to the top and bottom Z being equal.
+ """
+ b = self.borders
+ tq = self.topquad.get_copy()
+ bq = self.bottomquad.get_copy()
+ tvl = tq.vl
+ bvl = bq.vl
+
+ """Vertices mapping NW SW SE NE
+ Top 0 1 2 3
+ Bottom 0 3 2 1
+ The vertices seem to be a different mapping than commented in convert_to_tri_cell() and the above mapping makes much more sense for normals' directions. This assumes we are viewing the quad from straight above from the positive Z direction.
+ """
+
+ # All vertexes of the top and bottom quad are at the same Z coordinate so the entire quad has 0 volume.
+ if (tvl[0].coords[2] == bvl[0].coords[2] and
+ tvl[1].coords[2] == bvl[3].coords[2] and
+ tvl[2].coords[2] == bvl[2].coords[2] and
+ tvl[3].coords[2] == bvl[1].coords[2]):
+ self.topquad = None #quad(None, None, None, None)
+ self.bottomquad = None #quad(None, None, None, None)
+ b["N"] = b["W"] = b["S"] = b["E"] = False
+ # (NW case) NW NE SW vertices are same Z, keep tri of SE SW NE
+ elif (tvl[0].coords[2] == bvl[0].coords[2] and
+ tvl[3].coords[2] == bvl[1].coords[2] and
+ tvl[1].coords[2] == bvl[3].coords[2]):
+ self.topquad = quad(tvl[3], tvl[1], tvl[2], None)
+ self.bottomquad = quad(bvl[1], bvl[2], bvl[3], None)
+ b["N"] = False #quad(tvl[1], tvl[3], bvl[1], bvl[3])
+ b["W"] = False
+ # (NE case) NW NE SE vertices are same Z, keep tri of SE SW NW
+ elif (tvl[0].coords[2] == bvl[0].coords[2] and
+ tvl[3].coords[2] == bvl[1].coords[2] and
+ tvl[2].coords[2] == bvl[2].coords[2]):
+ self.topquad = quad(tvl[0], tvl[1], tvl[2], None)
+ self.bottomquad = quad(bvl[0], bvl[2], bvl[3], None)
+ b["N"] = False #quad(tvl[0], tvl[2], bvl[2], bvl[0])
+ b["E"] = False
+ # (SE case) NE SE SW vertices are same Z, keep tri of SW NW NE
+ elif (tvl[3].coords[2] == bvl[1].coords[2] and
+ tvl[1].coords[2] == bvl[3].coords[2] and
+ tvl[2].coords[2] == bvl[2].coords[2]):
+ self.topquad = quad(tvl[3], tvl[0], tvl[1], None)
+ self.bottomquad = quad(bvl[3], bvl[0], bvl[1], None)
+ b["S"] = False #quad(tvl[3], tvl[1], bvl[3], bvl[1])
+ b["E"] = False
+ # (SW case) SE SW NW vertices are same Z, keep tri of NW NE SE
+ elif (tvl[0].coords[2] == bvl[0].coords[2] and
+ tvl[1].coords[2] == bvl[3].coords[2] and
+ tvl[2].coords[2] == bvl[2].coords[2]):
+ self.topquad = quad(tvl[2], tvl[3], tvl[0], None)
+ self.bottomquad = quad(bvl[0], bvl[1], bvl[2], None)
+ b["S"] = False #quad(tvl[2], tvl[0], bvl[0], bvl[2])
+ b["W"] = False
+
+ # for surface polygons we can remove matching tris that have the same Z for all vertex
+ if self.topSurfacePolygons and self.bottomSurfacePolygons:
+ ti = 0
+ while ti < len(self.topSurfacePolygons):
+ match = False
+ bi = 0
+ while bi < len(self.bottomSurfacePolygons):
+ if polygons_equal_3d(self.topSurfacePolygons[ti], self.bottomSurfacePolygons[bi]):
+ del self.topSurfacePolygons[ti]
+ del self.bottomSurfacePolygons[bi]
+ match = True
+ break
+ bi += 1
+ if not match:
+ ti += 1
'''
#profiling decorator
@@ -389,17 +295,70 @@ def wraps(*args, **kwargs):
return wraps
'''
-
-
-
-
+class ProcessingTile:
+ tile_info: TouchTerrainTileInfo
+ top_raster_variants: RasterVariants
+ bottom_raster_variants: Union[None, RasterVariants]
+
+ def __init__(self, tile_info: TouchTerrainTileInfo, top: RasterVariants, bottom: Union[None, RasterVariants]):
+ self.tile_info = tile_info
+ self.top_raster_variants = top
+ self.bottom_raster_variants = bottom
+
+def interpolate_with_NaN(elev: np.ndarray, i, j) -> tuple[float|None, float|None, float|None, float|None]:
+ '''Get elevation of 4 corners of current cell and return them as NEelev, NWelev, SEelev, SWelev
+ If any of the corners is NaN, return None for all 4 corners'''
+
+ # interpolate each corner with possible NaNs, using mean()
+ # Note: if we have 1 or more NaNs, we get a warning: warnings.warn("Mean of empty slice", RuntimeWarning)
+    # but if the result of ANY corner is NaN (b/c it used 4 NaNs), skip this cell entirely by setting it to None instead of a cell object
+ with warnings.catch_warnings():
+ warnings.filterwarnings('error')
+ NEar = np.array([elev[j+0,i+0], elev[j-1,i-0], elev[j-1,i+1], elev[j-0,i+1]]).astype(np.float64)
+ NWar = np.array([elev[j+0,i+0], elev[j+0,i-1], elev[j-1,i-1], elev[j-1,i+0]]).astype(np.float64)
+ SEar = np.array([elev[j+0,i+0], elev[j-0,i+1], elev[j+1,i+1], elev[j+1,i+0]]).astype(np.float64)
+ SWar = np.array([elev[j+0,i+0], elev[j+1,i+0], elev[j+1,i-1], elev[j+0,i-1]]).astype(np.float64)
+
+ try:
+ # init all elevs with NaN
+ NEelev = NWelev = SEelev = SWelev = np.nan
+
+ # nanmean() is expensive, so only use it when actually needed
+ # if any of the interp sources are < basethick, leave the corner height as the cell height
+ NEelev = np.nanmean(NEar) if np.isnan(np.sum(NEar)) else (elev[j+0,i+0] + elev[j-1,i-0] + elev[j-1,i+1] + elev[j-0,i+1]) / 4.0
+ NWelev = np.nanmean(NWar) if np.isnan(np.sum(NWar)) else (elev[j+0,i+0] + elev[j+0,i-1] + elev[j-1,i-1] + elev[j-1,i+0]) / 4.0
+ SEelev = np.nanmean(SEar) if np.isnan(np.sum(SEar)) else (elev[j+0,i+0] + elev[j-0,i+1] + elev[j+1,i+1] + elev[j+1,i+0]) / 4.0
+ SWelev = np.nanmean(SWar) if np.isnan(np.sum(SWar)) else (elev[j+0,i+0] + elev[j+1,i+0] + elev[j+1,i-1] + elev[j+0,i-1]) / 4.0
+
+ except RuntimeWarning: # corner is surrounded by NaN elevations - skip this cell
+ #print(j-1, i-1, ": elevation of at least one corner of this cell is NaN - skipping cell")
+ #print " NW",NWelev," NE", NEelev, " SE", SEelev, " SW", SWelev # DEBUG
+ num_nans = sum(np.isnan(np.array([NEelev, NWelev, SEelev, SWelev]))) # is ANY of the corners NaN?
+ if num_nans > 0: # yes, set cell to None and skip it ...
+ # self.cells[j-1, i-1] = None # I commented this out since I have moved interpolate_with_NaN() out of grid class. Not sure what this part does if we return None for the 4 corners already?? -Anson
+ return None, None, None, None
+ else:
+
+ '''
+ print("\n", i,j)
+ print("NE", elev[j+0,i+0], elev[j-1,i-0], elev[j-1,i+1], elev[j-0,i+1], NEelev)
+ print("NW", elev[j+0,i+0], elev[j+0,i-1], elev[j-1,i-1], elev[j-1,i+0], NWelev)
+ print("SE", elev[j+0,i+0], elev[j-0,i+1], elev[j+1,i+1], elev[j+1,i+0], SEelev)
+ print("SW", elev[j+0,i+0], elev[j+1,i+0], elev[j+1,i-1], elev[j+0,i-1], SWelev)
+ '''
+ return NEelev, NWelev, SEelev, SWelev
+
class grid:
"""makes cell data structure from two np arrays (top, bottom) of the same shape."""
#@profile # https://pypi.org/project/memory-profiler/
# I'm unclear why these class attributes need to be created here (added by keerl)
- top = None
- bottom = None
+ tile: ProcessingTile = None
+ # Check self.tile.bottom_raster_variants is not None to check if doing a "difference mesh" mode generation with a bottom array present.
+ # If bottom_raster_variants is None, we only generate from top to flat bottom which is "top mesh" mode.
+
+ bottom_thru_base: bool = False # Indicates if generating the "thru" mode
+
tile_info = None
xmaxidx = None
ymaxidx = None
@@ -410,26 +369,29 @@ class grid:
fo = None
- def __init__(self, top, bottom, top_orig, tile_info):
- '''top: top elevation raster, must hang over by 1 row/column on each side (be already padded)
- bottom: None => bottom elevation is 0, otherwise either a 8 bit raster that will be resized to top's size or a bottom elevation raster
- top_orig: top elevation raster before top dilation
- tile_info: dict with info about the current tile + some tile global settings
+ def __init__(self, tile: ProcessingTile):
+ '''tile: Includes Top and Bottom raster variants and tile_info dict
'''
+ self.tile = tile
+ self.tile_info = tile.tile_info
-
- self.top = top
- self.bottom = bottom
- self.top_orig = top_orig
- self.throughwater = tile_info["throughwater"] # Anson's all-the-way-through water case
- self.tile_info = tile_info
+ self.bottom_thru_base = tile.tile_info.config.bottom_thru_base # Anson's all-the-way-through case
+ self.tile_info = tile.tile_info
- if self.tile_info["fileformat"] == 'obj':
+ if self.tile_info.config.fileformat == 'obj':
vertex.vertex_index_dict = {} # will be filled with vertex indices
self.cells = None # stores the cells in a 2D array of cells
+ if tile.top_raster_variants.dilated is None:
+ print("grid.init() error: No prepared top raster. tile.top_raster_variants.dilated is None")
+ return None
+
+ if tile.bottom_raster_variants is None:
+ print("grid.init() error: No bottom_raster_variants passed in. bottom_raster_variants required to be passed in even if the variants are None.") # We may set bottom_raster_variants to None later to signal that we are not in "difference mesh" mode.
+ return None
+
# Important: in 2D np arrays, x and y coordinate are "flipped" in the sense that when printing top
# top[0,0] appears to the upper left (NW) corner and [0,1] (East) of it:
#[[11 12 13] top[0,1] => 12
@@ -445,188 +407,123 @@ def __init__(self, top, bottom, top_orig, tile_info):
#print "normalized x/y delta:", x_norm_delta, y_norm_delta
# cell size (x and y delta)
- self.cell_size = self.tile_info["pixel_mm"]
+ self.cell_size = self.tile_info.pixel_mm
# does top have NaNs?
- self.tile_info["have_nan"] = np.any(np.isnan(self.top)) # True => we have NaN values
+ #self.tile_info.have_nan = np.any(np.isnan(self.top)) # True => we have NaN values
+ self.tile_info.have_nan = np.any(np.isnan(tile.top_raster_variants.dilated)) # True => we have NaN values
# same for bottom, if we have one
- if self.tile_info["bottom_elevation"] is not None:
- self.tile_info["have_bot_nan"] = np.any(np.isnan(self.bottom))# True => we have NaN values,
+ if self.tile_info.config.bottom_elevation is not None and tile.bottom_raster_variants.dilated is not None:
+ self.tile_info.have_bot_nan = np.any(np.isnan(tile.bottom_raster_variants.dilated))# True => we have NaN values,
# Jan 2019: no idea why, but sometimes changing top also changes the elevation
# array of another tile in the tile list
# for now I make a copy of all rasters and convert them to float
- self.top = self.top.copy().astype(np.float64) # writeable
-
- if self.bottom is not None:
- self.bottom = bottom.copy().astype(np.float64) # writeable
-
- if self.top_orig is not None:
- self.top_orig = top_orig.copy().astype(np.float64)
-
+ # self.top = tile.top_raster_variants.dilated.copy().astype(np.float64) # writeable
+
+ # if tile.top_raster_variants.original is not None:
+ # tile.top_raster_variants.original = tile.top_raster_variants.original.copy().astype(np.float64) # writeable
+
+ # if tile.top_raster_variants.nan_close is not None:
+ # tile.top_raster_variants.nan_close = tile.top_raster_variants.nan_close.copy().astype(np.float64)
+ # if tile.bottom_raster_variants is not None:
+ # if tile.bottom_raster_variants.dilated is not None:
+ # tile.bottom_raster_variants.dilated = tile.bottom_raster_variants.dilated.copy().astype(np.float64) # writeable
+
+ # if tile.bottom_raster_variants.original is not None:
+ # tile.bottom_raster_variants.original = tile.bottom_raster_variants.original.copy().astype(np.float64) # writeable
+
#
# Some sanity checks
#
- # if bottom is not an ndarray, we don't have a bottom raster, so the bottom is a constant 0
- if isinstance(self.bottom, np.ndarray) == False:
- self.bottom = 0
- self.tile_info["have_bottom_array"] = False
+ # if bottom_raster_variants.dilated (last processed variant) is not an ndarray, we don't have a bottom raster, so bottom_raster_variants is set to None
+ if isinstance(tile.bottom_raster_variants.dilated, np.ndarray) == False:
+ tile.bottom_raster_variants = None
# can't have a bottom_image and NaNs in top
- elif tile_info["bottom_image"] is not None and isinstance(self.bottom, np.ndarray) == True and self.tile_info["have_nan"] == True:
- self.tile_info["have_bottom_array"] = False
- self.bottom = 0
+ elif self.tile_info.config.bottom_image is not None and isinstance(tile.bottom_raster_variants.dilated, np.ndarray) == True and self.tile_info.have_nan == True:
+ tile.bottom_raster_variants = None
print("Top has NaN values, requested bottom image will be ignored!")
# bottom is a elevation raster. It's ok to have NaNs in the bottom raster and/or top raster
- elif tile_info["bottom_elevation"] is not None and isinstance(self.bottom, np.ndarray) == True:
- self.tile_info["have_bottom_array"] = True
+ elif self.tile_info.config.bottom_elevation is not None and isinstance(tile.bottom_raster_variants.dilated, np.ndarray) == True:
+ tile.bottom_raster_variants = tile.bottom_raster_variants
# need to use the tilewide min/max for each tile, otherwise the boudaries don't line up perfectly!
-
- if self.tile_info["bottom_elevation"] is not None: # we have a bottom raster
-
- '''
-
- # where top is actually lower than bottom (which can happen with Anson's data), set top to bottom
- self.top = np.where(self.top < self.bottom, self.bottom, self.top)
- #
- # Checking for all-the-way-through bottom NaNs and for top == bottom or bottom < top
- #
-
- # If the bottom has NaNs where top does not, set them to 0
- # This is very specific to Anson's way of creating all-the-way-through water
- # where his preprocessing sets the bottom to NaN for the water. (here called throughwater case)
- if self.tile_info["have_bot_nan"] == True:
- # CH1
- # bool array with True where self.bottom has NaN values but self.top does not
- nan_values = np.logical_and(np.isnan(self.bottom), np.logical_not(np.isnan(self.top)))
- if np.any(nan_values) == True:
- self.bottom[nan_values] = 0 # set bottom NaN values to 0
- self.throughwater = True # flag for easy checking
-
-
- # if both have the same value (or very close to) set both to Nan
- # No relative tolerance here as we don't care about this concept here. Set the abs. tolerance to 0.001 m (1 mm)
- close_values = np.isclose(self.top, self.bottom, rtol=0, atol=0.001, equal_nan=False) # bool array
-
- # for any True values in array, set corresponding top and bottom cells to NaN
- # Also set NaN flags
- if np.any(close_values) == True:
- # save pre-dilated top for later dilation
- top_pre_dil = self.top.copy()
- self.top[close_values] = np.nan # set close values to NaN
-
- # if diagonal cleanup is requested, we need to do it again after setting NaNs
- #clean_up_diags_check(self.top)
-
- # save original top after setting NaNs so we can skip the undilated NaN cells later
- self.top_orig = self.top.copy()
- self.top = dilate_array(self.top, top_pre_dil) # dilate the NaN'd top with the original (pre NaN'd) top
-
- self.bottom[close_values] = np.nan # set close values to NaN
- #clean_up_diags_check(self.bottom) # re-check for diags
-
-
- if self.throughwater == True:
- self.bottom = dilate_array(self.bottom) # dilate with 3x3 nanmean #
- else:
- self.bottom = dilate_array(self.bottom, top_pre_dil) # dilate the NaN'd bottom with the original (pre NaN'd) top (same as original bottom)
-
-
- # as we may have changed the rasters, recalculate min elev (TODO: not sure if this is needed any more)
- self.tile_info["min_elev"] = np.nanmin(self.top)
- self.tile_info["min_bot_elev"] = np.nanmin(self.bottom)
-
- # check if we have NaNs in the top and/or bottom now (any() returns Bools)
- self.tile_info["have_nan"] = np.any(np.isnan(self.top))
- self.tile_info["have_bot_nan"] = np.any(np.isnan(self.bottom))
-
- # pre-dilated top is not needed anymore
- del top_pre_dil
-
- # if we have no bottom but have NaNs in top, make a copy and 3x3 dilate it. We'll still use the non-dilated top
- # when we need to skip NaN cells
- elif self.tile_info["have_nan"] == True:
-
- self.top_orig = self.top.copy() # save original top before it gets dilated
- self.top = dilate_array(self.top) # dilate with 3x3 nanmean
-
- # CH2
- '''
#
- # Convert elevation from real word elevation (m) to model height (mm)
+        # Convert elevation from real world elevation (m) to model print3D height (mm)
#
- if self.tile_info["use_geo_coords"] is None: # Coordinates need to be in mm
-
- scz = 1 / self.tile_info["scale"] * 1000.0 # scale z to mm
+ if self.tile_info.config.use_geo_coords is None: # Coordinates need to be in mm
- if self.tile_info["have_bottom_array"] == False:
- self.top -= self.tile_info["min_elev"] # subtract global min from top to get to 0
+ scz = 1 / self.tile_info.scale * 1000.0 # scale z to mm
- else:
- if self.throughwater == False: # normal water case,
- self.top -= self.tile_info["min_bot_elev"] # subtract global bottom min
- self.bottom -= self.tile_info["min_bot_elev"]
- self.bottom += self.tile_info["user_offset"] # add potential user offset from top (default: 0)
- self.bottom *= scz * self.tile_info["z_scale"] # apply z-scale to bottom
- self.bottom += self.tile_info["base_thickness_mm"] # add base thickness to bottom
-
- # Update with per-tile mm min/max
- self.tile_info["min_bot_elev"] = np.nanmin(self.bottom)
- self.tile_info["max_bot_elev"] = np.nanmax(self.bottom)
- print("bottom min/max (mm) for tile:", self.tile_info["min_bot_elev"], self.tile_info["max_bot_elev"])
- else: # throughwater case
- self.top -= self.tile_info["min_elev"]
- # bottom was set to 0 earlier
-
- self.top += self.tile_info["user_offset"] # add potential user offset from top (default: 0)
- self.top *= scz * self.tile_info["z_scale"] # apply z-scale to top
- self.top += self.tile_info["base_thickness_mm"] # add base thickness to top
+ if tile.bottom_raster_variants is not None: # Top-Bottom difference mesh mode
+ if self.bottom_thru_base == False: # normal case,
+ tile.bottom_raster_variants -= self.tile_info.config.min_elev
+
+ tile.bottom_raster_variants *= scz * self.tile_info.config.zscale # apply z-scale to bottom
+ tile.bottom_raster_variants += self.tile_info.config.basethick # add base thickness to bottom
+
+ if tile.bottom_raster_variants.dilated is not None:
+ # Update with per-tile mm min/max
+ self.tile_info.min_bot_elev = np.nanmin(tile.bottom_raster_variants.dilated)
+ self.tile_info.max_bot_elev = np.nanmax(tile.bottom_raster_variants.dilated)
+ print("bottom min/max (mm) for tile:", self.tile_info.min_bot_elev, self.tile_info.max_bot_elev)
+ else:
+ print("tile.bottom_raster_variants.dilated not found")
+ return None
+ #else:
+ # do nothing in the bottom_thru_base case because we previously set bottom raster to 0
+
+ tile.top_raster_variants -= self.tile_info.config.min_elev
+ tile.top_raster_variants *= scz * self.tile_info.config.zscale # apply z-scale to top
+ tile.top_raster_variants += self.tile_info.config.basethick # add base thickness to top
# post-scale (i.e. in mm) top elevations (for this tile)
- self.tile_info["min_elev"] = np.nanmin(self.top)
- self.tile_info["max_elev"] = np.nanmax(self.top)
- print("top min/max for tile (mm):", self.tile_info["min_elev"], self.tile_info["max_elev"])
+ self.tile_info.config.min_elev = np.nanmin(tile.top_raster_variants.dilated)
+ self.tile_info.max_elev = np.nanmax(tile.top_raster_variants.dilated)
+ print("top min/max for tile (mm):", self.tile_info.config.min_elev, self.tile_info.max_elev)
else: # using geo coords (UTM, meter based) - thickness is meters
# TODO: Just noticed that we don't apply a z-scale to the top. Not sure if we should
- self.bottom = self.tile_info["min_elev"] - self.tile_info["base_thickness_mm"] * 10
- logger.info("Using geo coords with a base thickness of " + str(self.tile_info["base_thickness_mm"] * 10) + " meters")
+ tile.bottom_raster_variants.dilated = self.tile_info.config.min_elev - self.tile_info.config.basethick * 10
+ logger.info("Using geo coords with a base thickness of " + str(self.tile_info.config.basethick * 10) + " meters")
+
+ # After this point, all values are in real print3D units (mm) and 0 is the bottom.
# max index in x and y for "inner" raster
- self.xmaxidx = self.top.shape[1]-2
- self.ymaxidx = self.top.shape[0]-2
+ self.xmaxidx = tile.top_raster_variants.dilated.shape[1]-2
+ self.ymaxidx = tile.top_raster_variants.dilated.shape[0]-2
#print range(1, xmaxidx+1), range(1, ymaxidx+1)
# offset so that 0/0 is the center of this tile (local) or so that 0/0 is the lower left corner of all tiles (global)
- if self.tile_info["tile_centered"] == False: # global offset, best for looking at all tiles together
- self.offsetx = -self.tile_info["tile_width"] * (self.tile_info["tile_no_x"]-1) # tile_no starts with 1! This is the top end of the tile, not 0!
- self.offsety = -self.tile_info["tile_height"] * (self.tile_info["tile_no_y"]-1) + self.tile_info["tile_height"] * self.tile_info["ntilesy"]
+ if self.tile_info.config.tile_centered == False: # global offset, best for looking at all tiles together
+ self.offsetx = -self.tile_info.tile_width * (self.tile_info.tile_no_x-1) # tile_no starts with 1! This is the top end of the tile, not 0!
+ self.offsety = -self.tile_info.tile_height * (self.tile_info.tile_no_y-1) + self.tile_info.tile_height * self.tile_info.config.ntilesy
else: # local centered for printing
- self.offsetx = self.tile_info["tile_width"] / 2.0
- self.offsety = self.tile_info["tile_height"] / 2.0
+ self.offsetx = self.tile_info.tile_width / 2.0
+ self.offsety = self.tile_info.tile_height / 2.0
# geo coords are in meters (UTM). tile_centered is ignored for geo coords
- if self.tile_info["use_geo_coords"] != None:
+ if self.tile_info.config.use_geo_coords != None:
- geo_transform = self.tile_info["geo_transform"]
+ geo_transform = self.tile_info.geo_transform
self.cell_size = abs(geo_transform[1]) # rw pixel size of geotiff in m
tile_width_m = self.xmaxidx * self.cell_size # number of (unpadded) pixels of current tile
tile_height_m = self.ymaxidx * self.cell_size
# Place the tiles so that the center is at 0/0, which is what Blender GIS needs.
- if self.tile_info["use_geo_coords"] == "centered":
+ if self.tile_info.config.use_geo_coords == "centered":
- self.offsetx = -tile_width_m * (self.tile_info["tile_no_x"]-1)
- self.offsety = tile_height_m * self.tile_info["ntilesy"] - tile_height_m * (self.tile_info["tile_no_y"]-1)
+ self.offsetx = -tile_width_m * (self.tile_info.tile_no_x-1)
+ self.offsety = tile_height_m * self.tile_info.config.ntilesy - tile_height_m * (self.tile_info.tile_no_y-1)
# center by half the total size
- self.offsetx += (self.tile_info["full_raster_width"] * self.cell_size) / 2
- self.offsety -= (self.tile_info["full_raster_height"] * self.cell_size) / 2
+ self.offsetx += (self.tile_info.full_raster_width * self.cell_size) / 2
+ self.offsety -= (self.tile_info.full_raster_height * self.cell_size) / 2
# correct for off-by-1 cells
self.offsetx -= self.cell_size
@@ -636,37 +533,37 @@ def __init__(self, top, bottom, top_orig, tile_info):
# UTM coordinates. Not sure what CAD/modelling system uses that but if needed it's an option.
else: # "UTM"
- self.offsetx = -tile_width_m * (self.tile_info["tile_no_x"]-1)
- self.offsety = -tile_height_m * (self.tile_info["tile_no_y"]-1)
+ self.offsetx = -tile_width_m * (self.tile_info.tile_no_x-1)
+ self.offsety = -tile_height_m * (self.tile_info.tile_no_y-1)
self.offsetx = -geo_transform[0] + self.offsetx # UTM x of upper left corner
self.offsety = geo_transform[3] + self.offsety # UTM y
# put corner coordinates tile info dict (may later be needed for 2 bottom triangles)
- if self.tile_info["tile_centered"] == False:
- #print("tile width", self.tile_info["tile_width"])
- #print("tile_no_x", self.tile_info["tile_no_x"])
- #print("tile_no_y", self.tile_info["tile_no_y"])
- #print("tile_height", self.tile_info["tile_height"])
- #print("ntilesy", self.tile_info["ntilesy"])
- self.tile_info["W"] = self.tile_info["tile_width"] * (self.tile_info["tile_no_x"]-1)
- self.tile_info["E"] = self.tile_info["W"] + self.tile_info["tile_width"]
- tot_height = self.tile_info["tile_height"] * self.tile_info["ntilesy"]
+ if self.tile_info.config.tile_centered == False:
+ #print("tile width", self.tile_info.tile_width)
+ #print("tile_no_x", self.tile_info.tile_no_x)
+ #print("tile_no_y", self.tile_info.tile_no_y)
+ #print("tile_height", self.tile_info.tile_height)
+ #print("ntilesy", self.tile_info.ntilesy)
+ self.tile_info.W = self.tile_info.tile_width * (self.tile_info.tile_no_x-1)
+ self.tile_info.E = self.tile_info.W + self.tile_info.tile_width
+ tot_height = self.tile_info.tile_height * self.tile_info.config.ntilesy
# y tiles index goes top(0) DOWN to bottom
- self.tile_info["N"] = tot_height - (self.tile_info["tile_height"] * (self.tile_info["tile_no_y"]-1))
- self.tile_info["S"] = self.tile_info["N"] - self.tile_info["tile_height"]
- #print("WENS", self.tile_info["W"] , self.tile_info["E"], self.tile_info["N"] ,self.tile_info["S"] )
+ self.tile_info.N = tot_height - (self.tile_info.tile_height * (self.tile_info.tile_no_y-1))
+ self.tile_info.S = self.tile_info.N - self.tile_info.tile_height
+ #print("WENS", self.tile_info.W , self.tile_info.E, self.tile_info.N ,self.tile_info.S )
else:
- self.tile_info["W"] = -self.tile_info["tile_width"] / 2
- self.tile_info["E"] = self.tile_info["tile_width"] / 2
- self.tile_info["S"] = -self.tile_info["tile_height"] / 2
- self.tile_info["N"] = self.tile_info["tile_height"] / 2
+ self.tile_info.W = -self.tile_info.tile_width / 2
+ self.tile_info.E = self.tile_info.tile_width / 2
+ self.tile_info.S = -self.tile_info.tile_height / 2
+ self.tile_info.N = self.tile_info.tile_height / 2
def clean_up_diags_check(self, ras):
'''Local function to check for NaNs in the raster and clean up diagonal NaNs if requested'''
if np.any(np.isnan(ras)) == True: # do we have any NaNs?
- if self.tile_info["clean_diags"] == True: # cleanup requested?
+ if self.tile_info.config.clean_diags == True: # cleanup requested?
ras = utils.clean_up_diags(ras)
def create_cells(self):
@@ -677,16 +574,22 @@ def create_cells(self):
for vertex coordinates. Here, only the index part (s[1] and fo[1]) is stored, the vertex coordinates will be
created and stored later based on the keys of the vertex class attribute vertex_index_dict'''
+ if self.tile_info is None:
+ print("create_cells: Error: self.tile_info is None")
+ return
+
+ top: Union[None, np.ndarray] = None
+
# store cells in an array, init to None
self.cells = np.empty([self.ymaxidx, self.xmaxidx], dtype=cell)
# TODO: not sure we need this any more, given that this was done on the full raster
# and after the operations that could have changed the raster
- if self.tile_info["clean_diags"] == True:
- self.top = utils.fillHoles(self.top, 1, 8, True) # fill single holes
- self.top = utils.clean_up_diags(self.top)
- if self.top_orig is not None:
- self.top_orig = utils.clean_up_diags(self.top_orig)
+ # if self.tile_info.config.clean_diags == True:
+ # self.tile.top_raster_variants.dilated = utils.fillHoles(self.tile.top_raster_variants.dilated, 1, 8, True) # fill single holes
+ # self.tile.top_raster_variants.dilated = utils.clean_up_diags(self.tile.top_raster_variants.dilated)
+ # if self.tile.top_raster_variants.nan_close is not None:
+ # self.tile.top_raster_variants.nan_close = utils.clean_up_diags(self.tile.top_raster_variants.nan_close)
# report progress in %
percent = 10
@@ -702,231 +605,418 @@ def create_cells(self):
for i in range(1, self.xmaxidx + 1):# x dim.
#print("y=",j," x=",i, " elev=",top[j,i])
- # for throughwater we must use the pre-dilated, but for NaN'd top only use this check
+ # for bottom_thru_base we must use the pre-dilated, but for NaN'd top only use this check
# same for top with NaNs which have been 3x3 dilated
# dirty_trianglescreates a technically better fit fit of the water into the terrain but will create triangles
# that are collapsed into a line or a point. This should not be a problem for a modern slicer but will
# lead to issues when using the model in a 3D mesh modeling program
- if self.tile_info["have_nan"] == True and self.tile_info["dirty_triangles"] == False:
- top = self.top_orig
+ # Top set here determines which cells to skip based on the cells' values
+ if self.tile_info.have_nan == True and self.tile_info.config.dirty_triangles == False:
+ top = self.tile.top_raster_variants.dilated
else:
- top = self.top
-
+ top = self.tile.top_raster_variants.dilated
+
+ # For Difference Mesh mode + bottom_thru_base
+ if self.tile.bottom_raster_variants is not None and self.tile_info.config.bottom_thru_base:
+ top = self.tile.bottom_raster_variants.nan_close
# if center elevation of current top cell is NaN, set its cell to None and skip the rest
- if self.tile_info["have_nan"] and np.isnan(top[j, i]):
+ if self.tile_info.have_nan and np.isnan(top[j, i]):
self.cells[j-1, i-1] = None
continue
# x/y coords of cell "walls", origin is upper left
- E = (i-1) * self.cell_size - self.offsetx # index -1 as it's ref'ing to top, not ptop
- W = E + self.cell_size # CH Nov 2021: I think E and W are flipped (?) but I must correct for that later somewhere (?)
+ W = (i-1) * self.cell_size - self.offsetx # index -1 as it's ref'ing to top, not ptop
+ E = W + self.cell_size
N = -(j-1) * self.cell_size + self.offsety # y is flipped to negative
S = N - self.cell_size
#print(i,j, " ", E,W, " ", N,S, " ", top[j,i])
- ## Which directions will need to have a wall?
- # True means: we have an adjacent cell and need a wall in that direction
- borders = dict([[drct, False] for drct in ["N", "S", "E", "W"]]) # init with no walls
- # set walls for fringe cells
- if j == 1 : borders["N"] = True
- if j == self.ymaxidx : borders["S"] = True
- if i == 1 : borders["W"] = True
- if i == self.xmaxidx : borders["E"] = True
- def interpolate_with_NaN(elev, i, j):
- '''Get elevation of 4 corners of current cell and return them as NEelev, NWelev, SEelev, SWelev
- If any of the corners is NaN, return None for all 4 corners'''
-
- # interpolate each corner with possible NaNs, using mean()
- # Note: if we have 1 or more NaNs, we get a warning: warnings.warn("Mean of empty slice", RuntimeWarning)
- # but if the result of ANY corner is NaN (b/c it used 4 NaNs), skip this cell entirely by setting it to None instead a cell object
- with warnings.catch_warnings():
- warnings.filterwarnings('error')
- NEar = np.array([elev[j+0,i+0], elev[j-1,i-0], elev[j-1,i+1], elev[j-0,i+1]]).astype(np.float64)
- NWar = np.array([elev[j+0,i+0], elev[j+0,i-1], elev[j-1,i-1], elev[j-1,i+0]]).astype(np.float64)
- SEar = np.array([elev[j+0,i+0], elev[j-0,i+1], elev[j+1,i+1], elev[j+1,i+0]]).astype(np.float64)
- SWar = np.array([elev[j+0,i+0], elev[j+1,i+0], elev[j+1,i-1], elev[j+0,i-1]]).astype(np.float64)
-
- try:
- # init all elevs with NaN
- NEelev = NWelev = SEelev = SWelev = np.nan
-
- # nanmean() is expensive, so only use it when actually needed
- NEelev = np.nanmean(NEar) if np.isnan(np.sum(NEar)) else (elev[j+0,i+0] + elev[j-1,i-0] + elev[j-1,i+1] + elev[j-0,i+1]) / 4.0
- NWelev = np.nanmean(NWar) if np.isnan(np.sum(NWar)) else (elev[j+0,i+0] + elev[j+0,i-1] + elev[j-1,i-1] + elev[j-1,i+0]) / 4.0
- SEelev = np.nanmean(SEar) if np.isnan(np.sum(SEar)) else (elev[j+0,i+0] + elev[j-0,i+1] + elev[j+1,i+1] + elev[j+1,i+0]) / 4.0
- SWelev = np.nanmean(SWar) if np.isnan(np.sum(SWar)) else (elev[j+0,i+0] + elev[j+1,i+0] + elev[j+1,i-1] + elev[j+0,i-1]) / 4.0
-
- except RuntimeWarning: # corner is surrounded by NaN elevations - skip this cell
- #print(j-1, i-1, ": elevation of at least one corner of this cell is NaN - skipping cell")
- #print " NW",NWelev," NE", NEelev, " SE", SEelev, " SW", SWelev # DEBUG
- num_nans = sum(np.isnan(np.array([NEelev, NWelev, SEelev, SWelev]))) # is ANY of the corners NaN?
- if num_nans > 0: # yes, set cell to None and skip it ...
- self.cells[j-1, i-1] = None
- return None, None, None, None
- else:
-
- '''
- print("\n", i,j)
- print("NE", elev[j+0,i+0], elev[j-1,i-0], elev[j-1,i+1], elev[j-0,i+1], NEelev)
- print("NW", elev[j+0,i+0], elev[j+0,i-1], elev[j-1,i-1], elev[j-1,i+0], NWelev)
- print("SE", elev[j+0,i+0], elev[j-0,i+1], elev[j+1,i+1], elev[j+1,i+0], SEelev)
- print("SW", elev[j+0,i+0], elev[j+1,i+0], elev[j+1,i-1], elev[j+0,i-1], SWelev)
- '''
- return NEelev, NWelev, SEelev, SWelev
-
-
- if not self.tile_info["have_nan"]:
+
+
+ #region Make top cell vertices' heights
+ interpolation_top_raster: Union[np.ndarray, None]
+ if not self.tile_info.have_nan:
+ interpolation_top_raster = self.tile.top_raster_variants.dilated
# non NaNs: interpolate elevation of four corners (array order is top[y,x]!)
- NEelev = (self.top[j+0,i+0] + self.top[j-1,i-0] + self.top[j-1,i+1] + self.top[j-0,i+1]) / 4.0
- NWelev = (self.top[j+0,i+0] + self.top[j+0,i-1] + self.top[j-1,i-1] + self.top[j-1,i+0]) / 4.0
- SEelev = (self.top[j+0,i+0] + self.top[j-0,i+1] + self.top[j+1,i+1] + self.top[j+1,i+0]) / 4.0
- SWelev = (self.top[j+0,i+0] + self.top[j+1,i+0] + self.top[j+1,i-1] + self.top[j+0,i-1]) / 4.0
- '''
- print("\n", i,j)
- print("NE",self.top[j+0,i+0],self.top[j-1,i-0],self.top[j-1,i+1],self.top[j-0,i+1], NEelev)
- print("NW",self.top[j+0,i+0],self.top[j+0,i-1],self.top[j-1,i-1],self.top[j-1,i+0], NWelev)
- print("SE",self.top[j+0,i+0],self.top[j-0,i+1],self.top[j+1,i+1],self.top[j+1,i+0], SEelev)
- print("SW",self.top[j+0,i+0],self.top[j+1,i+0],self.top[j+1,i-1],self.top[j+0,i-1], SWelev)
- '''
+ NEelev = (interpolation_top_raster[j+0,i+0] + interpolation_top_raster[j-1,i-0] + interpolation_top_raster[j-1,i+1] + interpolation_top_raster[j-0,i+1]) / 4.0
+ NWelev = (interpolation_top_raster[j+0,i+0] + interpolation_top_raster[j+0,i-1] + interpolation_top_raster[j-1,i-1] + interpolation_top_raster[j-1,i+0]) / 4.0
+ SEelev = (interpolation_top_raster[j+0,i+0] + interpolation_top_raster[j-0,i+1] + interpolation_top_raster[j+1,i+1] + interpolation_top_raster[j+1,i+0]) / 4.0
+ SWelev = (interpolation_top_raster[j+0,i+0] + interpolation_top_raster[j+1,i+0] + interpolation_top_raster[j+1,i-1] + interpolation_top_raster[j+0,i-1]) / 4.0
else:
# NaNs: set borders to True if we have any NaNs in any of the adjacent cells
# Do this only for top as we assume that any bottom raster NaNs are the same as on top
+ # Interpolate with edge_interpolation raster variant if available.
+ interpolation_top_raster = self.tile.top_raster_variants.edge_interpolation
+ if interpolation_top_raster is None:
+ if self.tile.bottom_raster_variants is None:
+ # Normal (not difference mesh) mode
+ # Otherwise use "original" top raster (it's only modified at top_hint mask locs to bottom_floor_elev
+ interpolation_top_raster = self.tile.top_raster_variants.original
+ # Use top.dilated for borders
+ else:
+ # Difference mesh mode
+ interpolation_top_raster = self.tile.top_raster_variants.original
+ if self.tile_info.config.bottom_thru_base:
+ # Use original top raster so we get accurate NaN location and borders
+ interpolation_top_raster = self.tile.top_raster_variants.original
+
# get values for current cell i, j, NEelev, NWelev, SEelev, SWelev
- NEelev, NWelev, SEelev, SWelev = interpolate_with_NaN(self.top, i, j)
- if NEelev is None: # if any of the corners is NaN, we have set the cell to None and can skip it
- continue
-
- # for the through water case or Top NaN, base the walls on the original (non-dilated) top
- if self.tile_info["have_nan"] == True:
- top = self.top_orig
- else:
- top = self.top
+ NEelev, NWelev, SEelev, SWelev = interpolate_with_NaN(interpolation_top_raster, i, j)
- with warnings.catch_warnings():
- warnings.filterwarnings('error')
- try:
- if np.isnan(top[j-1,i]): borders["N"] = True
- if np.isnan(top[j+1,i]): borders["S"] = True
- if np.isnan(top[j,i-1]): borders["W"] = True
- if np.isnan(top[j,i+1]): borders["E"] = True
- except RuntimeWarning:
- pass # nothing wrong - just here to ignore the warning
-
+ # top
+ # set breakpoint for specific points for debugging
+ # if j == 10 and i ==9:
+ # 0==0
+ if NEelev is None: # if any of the corners is NaN, we have set the cell to None and can skip it
+ continue
+
+ # compare values with real print3D heights at this point
+ # Pull values set to bottom_floor_elev (which will be just below basethick) to actual 0 because we added basethick to all raster.
+ if NEelev < self.tile_info.config.basethick:
+ NEelev = 0
+ if NWelev < self.tile_info.config.basethick:
+ NWelev = 0
+ if SEelev < self.tile_info.config.basethick:
+ SEelev = 0
+ if SWelev < self.tile_info.config.basethick:
+ SWelev = 0
+
#
- # Make top and bottom quads and wall. Note that here we flip x and y coordinate axis to the system
- # used in 3D graphics
+ # Note that here we flip x and y coordinate axis to the system used in 3D graphics
#
- # make top quad (x,y,z) vi is the vertex index dict of the grids
- NEt = vertex(E, N, NWelev) # yes, NEt gets the z of NWelev, has to do with coordinate system change
- NWt = vertex(W, N, NEelev)
- SEt = vertex(E, S, SWelev)
- SWt = vertex(W, S, SEelev)
+ # make top quad (x,y,z) vertices; vi is the vertex index dict of the grids
+ NEt = vertex(E, N, NEelev)
+ NWt = vertex(W, N, NWelev)
+ SEt = vertex(E, S, SEelev)
+ SWt = vertex(W, S, SWelev)
# a certain vertex order is needed to make the 2 triangles be counter clockwise and so point outwards
- topq = quad(NEt, SEt, SWt, NWt)
+ # top quad vertex order is so that the normal points up
+ topq = quad(NWt, SWt, SEt, NEt)
#print(i, j, topq)
-
+ top_bottom_surface_geometries_2D: list[shapely.Geometry] | None = None
+ top_bottom_surface_polygons_triangulated_2D: list[shapely.GeometryCollection] | None = None # tris for top and bottom surfaces
+ # Check if non-quad top_surface polygon should be used
+ top_surface_polygons_triangulated_3D: list[shapely.Polygon] | None = None
+ # by checking if the cell is NOT contains_properly and if it has polygon_intersection_geometry
+ if (self.tile.top_raster_variants.polygon_intersection_contains_properly is not None and self.tile.top_raster_variants.polygon_intersection_contains_properly[j-1][i-1] == False) and self.tile.top_raster_variants.polygon_intersection_geometry is not None:
+ top_bottom_surface_polygons_triangulated_2D = []
+
+ top_bottom_surface_geometries_2D = self.tile.top_raster_variants.polygon_intersection_geometry[j-1][i-1]
+ if top_bottom_surface_geometries_2D is not None:
+ # We can verify if our shapely utils coordinate converter matches the N W S E made in create_cells. (it does if you adjust for the padding difference)
+ #quadPrint2DCoords = utils.arrayCellCoordToQuadPrint2DCoords(array_coord_2D=(i-1,j-1), cell_size=self.cell_size, tile_y_shape=self.tile.top_raster_variants.polygon_intersection_geometry.shape[0])
+ top_bottom_surface_geometries_2D = self.tile.top_raster_variants.polygon_intersection_geometry[j-1][i-1]
+ top_surface_polygons_triangulated_3D = [] #init array
+ # Only interpolate the Polygons for the top surface
+ for polygon in [item for item in (top_bottom_surface_geometries_2D if top_bottom_surface_geometries_2D else []) if isinstance(item, shapely.Polygon)]:
+ top_bottom_surface_polygons_triangulated_2D.append(shapely.constrained_delaunay_triangles(polygon))
+ for gc in top_bottom_surface_polygons_triangulated_2D:
+ for tri in [item for item in gc.geoms if isinstance(item, shapely.Polygon)]:
+ tri_ccw_order = shapely.orient_polygons(tri, exterior_cw=False)
+ tri_with_z = interpolate_z_planar(geometry_2d=tri_ccw_order, planes_3d=topq.get_triangles_in_polygons(split_rotation=self.tile_info.config.split_rotation))
+ #tri_with_z = interpolate_geometry_with_quad(geometry=tri_ccw_order, quad=topq, split_rotation=self.tile_info.config.split_rotation)
+ if isinstance(tri_with_z, shapely.Polygon):
+ top_surface_polygons_triangulated_3D.append(tri_with_z)
+ else:
+ raise ValueError(f"tri_with_z is not Polygon, it is {type(tri_with_z)}")
+
+ #endregion
+
#
- # make bottom quad
+ #region Make bottom quad
#
- # get corner for bottom array
- if self.tile_info["have_bottom_array"] == True:
-
+ # get corners for bottom array
+ if self.tile.bottom_raster_variants is None:
+ # Normal mode
+ NEelev = NWelev = SEelev = SWelev = 0
+ else:
+ # Difference mode
# for the through water case, simply set the bottom to 0
- if self.throughwater == True:
+ if self.bottom_thru_base == True:
NEelev = NWelev = SEelev = SWelev = 0
else:
# simple interpolation
- if not self.tile_info["have_bot_nan"]:
- NEelev = (self.bottom[j+0,i+0] + self.bottom[j-1,i-0] + self.bottom[j-1,i+1] + self.bottom[j-0,i+1]) / 4.0
- NWelev = (self.bottom[j+0,i+0] + self.bottom[j+0,i-1] + self.bottom[j-1,i-1] + self.bottom[j-1,i+0]) / 4.0
- SEelev = (self.bottom[j+0,i+0] + self.bottom[j-0,i+1] + self.bottom[j+1,i+1] + self.bottom[j+1,i+0]) / 4.0
- SWelev = (self.bottom[j+0,i+0] + self.bottom[j+1,i+0] + self.bottom[j+1,i-1] + self.bottom[j+0,i-1]) / 4.0
+ if not self.tile_info.have_bot_nan:
+ NEelev = (self.tile.bottom_raster_variants.dilated[j+0,i+0] + self.tile.bottom_raster_variants.dilated[j-1,i-0] + self.tile.bottom_raster_variants.dilated[j-1,i+1] + self.tile.bottom_raster_variants.dilated[j-0,i+1]) / 4.0
+ NWelev = (self.tile.bottom_raster_variants.dilated[j+0,i+0] + self.tile.bottom_raster_variants.dilated[j+0,i-1] + self.tile.bottom_raster_variants.dilated[j-1,i-1] + self.tile.bottom_raster_variants.dilated[j-1,i+0]) / 4.0
+ SEelev = (self.tile.bottom_raster_variants.dilated[j+0,i+0] + self.tile.bottom_raster_variants.dilated[j-0,i+1] + self.tile.bottom_raster_variants.dilated[j+1,i+1] + self.tile.bottom_raster_variants.dilated[j+1,i+0]) / 4.0
+ SWelev = (self.tile.bottom_raster_variants.dilated[j+0,i+0] + self.tile.bottom_raster_variants.dilated[j+1,i+0] + self.tile.bottom_raster_variants.dilated[j+1,i-1] + self.tile.bottom_raster_variants.dilated[j+0,i-1]) / 4.0
else:
# Nan aware interpolation
- NEelev, NWelev, SEelev, SWelev = interpolate_with_NaN(self.bottom, i, j)
+ NEelev, NWelev, SEelev, SWelev = interpolate_with_NaN(self.tile.bottom_raster_variants.original, i, j)
+
+ # bottom
+ # set breakpoint for specific points for debugging
+ # if j == 10 and i ==9:
+ # 0==0
+
if NEelev is None: # if any of the corners is NaN, we have set the cell to None and are skippping it
continue # skip this cell
- else:
- NEelev = NWelev = SEelev = SWelev = self.bottom # otherwise use the constant bottom elevation value
+
+ # Pull values set to bottom_floor_elev to actual 0
+ # compare values with real print3D heights at this point
+ if NEelev < self.tile_info.config.basethick:
+ NEelev = 0
+ if NWelev < self.tile_info.config.basethick:
+ NWelev = 0
+ if SEelev < self.tile_info.config.basethick:
+ SEelev = 0
+ if SWelev < self.tile_info.config.basethick:
+ SWelev = 0
# from whatever bottom values we have now, make the bottom quad
# (if we do the 2 tri bottom, these will end up not be used for the bottom but they may be used for any walls ...)
- NEb = vertex(E, N, NWelev)
- NWb = vertex(W, N, NEelev)
- SEb = vertex(E, S, SWelev)
- SWb = vertex(W, S, SEelev)
- botq = quad(NEb, NWb, SWb, SEb)
-
+ NEb = vertex(E, N, NEelev)
+ NWb = vertex(W, N, NWelev)
+ SEb = vertex(E, S, SEelev)
+ SWb = vertex(W, S, SWelev)
+ botq = quad(NWb, NEb, SEb, SWb)
+
+ # Check if non-quad top_surface polygon should be used for bottom quad
+ bottom_surface_polygons_triangulated_3D: list[shapely.Polygon] | None = None
+ if top_bottom_surface_polygons_triangulated_2D is not None:
+ # We can verify if our shapely utils coordinate converter matches the N W S E made in create_cells. (it does if you adjust for the padding difference)
+ #quadPrint2DCoords = utils.arrayCellCoordToQuadPrint2DCoords(array_coord_2D=(i-1,j-1), cell_size=self.cell_size, tile_y_shape=self.tile.top_raster_variants.polygon_intersection_geometry.shape[0])
+
+ bottom_surface_polygons_triangulated_3D = []
+ # Only interpolate the Polygons for the bottom surface
+ for gc in top_bottom_surface_polygons_triangulated_2D:
+ for tri in [item for item in gc.geoms if isinstance(item, shapely.Polygon)]:
+ tri_cw_order = shapely.orient_polygons(tri, exterior_cw=True)
+ tri_with_z = interpolate_z_planar(geometry_2d=tri_cw_order, planes_3d=botq.get_triangles_in_polygons(split_rotation=self.tile_info.config.split_rotation))
+ if isinstance(tri_with_z, shapely.Polygon):
+ bottom_surface_polygons_triangulated_3D.append(tri_with_z)
+ else:
+ raise TypeError('tri_with_z is not Polygon. interpolate_z_planar did not return the same Polygon type passed in')
+
+ #endregion
+
#print(topq)
#print(botq)
- # Quads for walls: in borders dict, replace any True with a quad of that wall
- if borders["N"] == True: borders["N"] = quad(NEb, NEt, NWt, NWb)
- if borders["S"] == True: borders["S"] = quad(SWb, SWt, SEt, SEb)
- if borders["E"] == True: borders["E"] = quad(NWt, SWt, SWb, NWb)
- if borders["W"] == True: borders["W"] = quad(SEt, NEt, NEb, SEb)
-
- # Make cell
- if self.tile_info["no_bottom"] == True:
+ #
+ #region Make borders
+ #
+
+ # Simple rectangular mesh case with no NaN
+ # Which directions will need to have a wall?
+ # True means: we have an adjacent cell and need a wall in that direction
+ borders = dict([[drct, False] for drct in ["N", "S", "E", "W"]]) # init with no walls
+ # set walls for fringe cells
+ if j == 1 : borders["N"] = True
+ if j == self.ymaxidx : borders["S"] = True
+ if i == 1 : borders["W"] = True
+ if i == self.xmaxidx : borders["E"] = True
+
+ #cell_clipping_intersection_geometry = self.tile.top_raster_variants.polygon_intersection_geometry[j][i] if self.tile.top_raster_variants.polygon_intersection_geometry else None
+ #if cell_clipping_intersection_geometry and len(cell_clipping_intersection_geometry) > 0: # Cell is partially intersecting or sharing edges with polygon
+ if False:
+ pass
+ else: # Cell contained properly in polygon
+ # Figure out which raster array to use when determining borders
+ borders_top_raster: Union[np.ndarray, None] = None
+ if self.tile.bottom_raster_variants is None:
+ # Normal mode
+ borders_top_raster = self.tile.top_raster_variants.dilated
+
+ else:
+ # Difference mesh mode
+ #force dilated top because using predilated version has NaNs at edge which makes extra walls
+ borders_top_raster = self.tile.top_raster_variants.dilated
+
+ #for difference mesh in bottom_thru_base case, check for walls with the nan_close version before dilation
+ if self.bottom_thru_base == True:
+ borders_top_raster = self.tile.top_raster_variants.nan_close
+
+ with warnings.catch_warnings():
+ warnings.filterwarnings('error')
+ try:
+ if np.isnan(borders_top_raster[j-1,i]): borders["N"] = True
+ if np.isnan(borders_top_raster[j+1,i]): borders["S"] = True
+ if np.isnan(borders_top_raster[j,i-1]): borders["W"] = True
+ if np.isnan(borders_top_raster[j,i+1]): borders["E"] = True
+ except RuntimeWarning:
+ pass # nothing wrong - just here to ignore the warning
+
+ # Quads for walls: in borders dict, replace any True with a quad of that wall
+ if borders["N"] == True: borders["N"] = quad(NWb, NWt, NEt, NEb)
+ if borders["S"] == True: borders["S"] = quad(SEb, SEt, SWt, SWb)
+ if borders["E"] == True: borders["E"] = quad(NEt, SEt, SEb, NEb)
+ if borders["W"] == True: borders["W"] = quad(SWt, NWt, NWb, SWb)
+
+ # create borders if there is a top surface polygon using the edge buckets
+ surface_polygon_borders_3D: list[quad] = []
+ if self.tile.top_raster_variants.polygon_intersection_edge_buckets is not None and self.tile.top_raster_variants.polygon_intersection_edge_buckets[j-1][i-1] is not None:
+ # Get list of all BorderEdges with edge geometry that should be walls
+ wall_borderEdges = []
+ buckets = self.tile.top_raster_variants.polygon_intersection_edge_buckets[j-1][i-1]
+ if isinstance(buckets, dict):
+ for bucket in buckets.values():
+ if isinstance(bucket, list):
+ for be in bucket:
+ if isinstance(be, BorderEdge):
+ if be.make_wall:
+ wall_borderEdges.append(be)
+
+ if top_bottom_surface_geometries_2D:
+ top_surface_edges_3D: list[shapely.LineString] = []
+ bot_surface_edges_3D: list[shapely.LineString] = []
+ for geom in top_bottom_surface_geometries_2D:
+ if isinstance(geom, shapely.Polygon):
+ flattened_top_geom = flatten_geometries(geometries=[interpolate_z_planar(geometry_2d=shapely.orient_polygons(geom, exterior_cw=False), planes_3d=topq.get_triangles_in_polygons(split_rotation=self.tile_info.config.split_rotation))], to_single_lines=True)
+ top_surface_edges_3D.extend([item for item in flattened_top_geom if isinstance(item, shapely.LineString)])
+ flattened_bot_geom = flatten_geometries(geometries=[interpolate_z_planar(geometry_2d=shapely.orient_polygons(geom, exterior_cw=True), planes_3d=botq.get_triangles_in_polygons(split_rotation=self.tile_info.config.split_rotation))], to_single_lines=True)
+ bot_surface_edges_3D.extend([item for item in flattened_bot_geom if isinstance(item, shapely.LineString)])
+
+ # Find matching interpolated top and bottom surface edges for all BorderEdges that should be a wall
+ # This match is O(n^2) and optimized to O(n^2/2) in the optimal case by removing matched top/bottom surface edges from the array so matched edges are not looked through again
+ for be in wall_borderEdges:
+ topEdgeMatch: shapely.LineString | None = None
+ botEdgeMatch: shapely.LineString | None = None
+ for ti in range(0, len(top_surface_edges_3D)):
+ if be.geometry.equals(top_surface_edges_3D[ti]):
+ topEdgeMatch = top_surface_edges_3D[ti]
+ del top_surface_edges_3D[ti]
+ break
+ for bi in range(0, len(bot_surface_edges_3D)):
+ if be.geometry.equals(bot_surface_edges_3D[bi]):
+ botEdgeMatch = bot_surface_edges_3D[bi]
+ del bot_surface_edges_3D[bi]
+ break
+ if topEdgeMatch and botEdgeMatch:
+ # Success condition where wall BorderEdge and top edge and bottom edge share the same X,Y endpoints
+ top_edge_v0 = vertex(*topEdgeMatch.coords[1])
+ top_edge_v1 = vertex(*topEdgeMatch.coords[0])
+ bot_edge_v0 = vertex(*botEdgeMatch.coords[1])
+ bot_edge_v1 = vertex(*botEdgeMatch.coords[0])
+ tb_wall = quad(top_edge_v0, top_edge_v1, bot_edge_v0, bot_edge_v1)
+ surface_polygon_borders_3D.append(tb_wall)
+ pass
+ elif topEdgeMatch:
+ raise RuntimeError('Border creation: top edge match found but no bot edge match.')
+ elif botEdgeMatch:
+ raise RuntimeError('Border creation: bot edge match found but no top edge match.')
+ else:
+ raise RuntimeError('Border creation: no top edge or bot edge matches')
+ # create border geometry with top and bot edge
+ # top and bot edges are in CW order (viewed from top) from shapely
+
+
+
+
+ #endregion
+
+ #region Make cell
+ if self.tile_info.config.no_bottom == True:
c = cell(topq, None, borders) # omit bottom - do not fill with 2 tris later (may have NaNs)
else:
- if self.tile_info["have_nan"] == True or self.tile_info["have_bottom_array"] == True:
+ if self.tile_info.have_nan == True or self.tile.bottom_raster_variants is not None: #self.tile_info.have_bottom_array == True:
# for through water case make sure this in not one of the dilated cells
c = cell(topq, botq, borders) # full cell: top quad, bottom quad and wall quads
else:
c = cell(topq, None, borders) # omit bottom, will fill with 2 tris later
+
+ # set surface polygons for cell if clipping border affects this cell
+ if top_surface_polygons_triangulated_3D:
+ c.topSurfacePolygons = top_surface_polygons_triangulated_3D
+ if bottom_surface_polygons_triangulated_3D:
+ c.bottomSurfacePolygons = bottom_surface_polygons_triangulated_3D
+ if surface_polygon_borders_3D:
+ c.surfacePolygonBorders = surface_polygon_borders_3D
# DEBUG: store i,j, and central elev
#c.iy = j-1
#c.ix = i-1
#c.central_elev = top[j-1,i-1]
+ if j == 10 and i == 10:
+ pass
+
+ if self.tile.bottom_raster_variants is not None and self.tile_info.config.split_rotation == 1:
+ c.remove_zero_height_volumes()
+
# if we have nan cells, do some postprocessing on this cell to get rid of stair case patterns
# This will create special triangle cells that have a triangle of any orientation at top/bottom, which
# are flagged as is_tri_cell = True, and have only v0, v1 and v2. One border is deleted, the other
# is set as a diagonal wall.
# Note: this will not be done if we have a bottom as it will lead to lots of triangle holes!
- if self.tile_info["have_nan"] == True and self.tile_info["smooth_borders"] == True and self.tile_info["have_bottom_array"] == False:
+ if self.tile_info.have_nan == True and self.tile_info.config.smooth_borders == True and self.tile.bottom_raster_variants is None: #self.tile_info.have_bottom_array == False:
#print(i,j, c.borders)
if c.check_for_tri_cell():
c.convert_to_tri_cell() # collapses top and bot quads into a triangle quad and make diagonal wall
+ #endregion
+
#
# Make quads for top, bottom and walls
#
- no_bottom = self.tile_info["no_bottom"]
- # list of quads for this cell,
- if no_bottom == False and (self.tile_info["have_nan"] or self.tile_info["have_bottom_array"]): #
- quads = [c.topquad, c.bottomquad]
- else:
- quads = [c.topquad] # no bottom quads, only top
+
+ # list of meshes for this cell,
+ meshes = c.meshes_for_model()
+
+ # # list of quads for this cell,
+ # if no_bottom == False and (self.tile_info.have_nan or self.tile.bottom_raster_variants is not None): #self.tile_info.have_bottom_array): #
+ # quads = [c.topquad, c.bottomquad]
+ # else:
+ # quads = [c.topquad] # no bottom quads, only top
# add border quads if we have any (False means no border quad)
- for k in c.borders: # k is N, S, E, W
- if c.borders[k] is not False: quads.append(c.borders[k])
+ # for k in c.borders: # k is N, S, E, W
+ # if c.borders[k] is not False: meshes.append(c.borders[k])
+
+ def triangle_rounded_to_precision(decimals: int, triangle: list[vertex]) -> list[vertex]:
+     """Return a copy of *triangle* with every vertex rounded to *decimals*
+     decimal places (via vertex.vertex_rounded_to_precision), to reduce
+     float noise before the triangle is written to the mesh buffer."""
+     output: list[vertex] = []
+     for tv in triangle:
+         output.append(tv.vertex_rounded_to_precision(decimals=decimals))
+     return output
+
+ decimal_precision = 6
- # write the triangles of this quad to buffer
- for q in quads:
- t0, t1 = q.get_triangles() # tri vertices
-
- # for STL this will write triangles (vertices) but for obj this will
- # write indices into s[1]/fo[1] (indices), vertices have to written based on these later
- self.write_triangle_to_buffer(t0)
- self.write_triangle_to_buffer(t1) # could be empty ...
+ # Debug: inspect cell
+ if j == 6 and i == 5:
+ pass
+
+ # write the triangles of the meshes to buffer
+ for q in meshes:
+ mesh_triangles: list[list[vertex]] = []
+ if isinstance(q, quad):
+ quad_triangles = q.get_triangles(split_rotation=self.tile_info.config.split_rotation) # tri vertices
+
+ # for STL this will write triangles (vertices) but for obj this will
+ # write indices into s[1]/fo[1] (indices), vertices have to written based on these later
+ for t in quad_triangles:
+ #mesh_triangles.append(list(t))
+ mesh_triangles.append(
+ triangle_rounded_to_precision(decimals=decimal_precision, triangle=list(t))
+ )
+ # if any(t0):
+ # self.write_triangle_to_buffer(t0)
+ # self.write_triangle_to_buffer(t1) # could be empty ...
+ elif isinstance(q, shapely.Polygon):
+ pass
+ t0 = tuple(polygon_to_list_of_vertex(polygon=q))
+ if len(t0) == 4 and t0[0].coords == t0[3].coords:
+ #mesh_triangles.append(list(t0[:3]))
+ mesh_triangles.append(triangle_rounded_to_precision(decimals=decimal_precision, triangle=list(t0[:3])))
+ else:
+ raise ValueError(f"create_cells: found a polygon to write to buffer that is not a triangle. Expected a tri of length 3+1=4 and [0]==[3] vertex. Polygon had vertex count f{len(t0)}.")
+
+ for mt in mesh_triangles:
+ self.write_triangle_to_buffer(tuple(mt))
print("100%", multiprocessing.current_process(), "\n", file=sys.stderr)
- def write_triangle_to_buffer(self, t):
+ def write_triangle_to_buffer(self, t: tuple[vertex, ...]):
'''write triangle vertices for triangle t to stream buffer self.s for caching.
Once the cache is full, it is written to disk (self.fo)'''
@@ -936,30 +1026,34 @@ def write_triangle_to_buffer(self, t):
self.num_triangles += 1
# Create triangle coords list, for STL including normal coords (no normals for obj)
- if self.tile_info["fileformat"] != "obj":
- tl = get_normal(t) if self.tile_info["no_normals"] == False else [0,0,0]
+ if self.tile_info.config.fileformat != "obj":
+ tl = get_normal(t) if self.tile_info.config.no_normals == False else [0,0,0]
for v in t:
coords = v.get() # get() => list of coords [x,y,z]
+ # pack 64 bit float to 32 bit and unpack 32 bit back to 64 bit to try to get the same value represented in 32 bit
+ #coords = tuple(map(lambda x: struct.unpack('<f', struct.pack('<f', x))[0], coords))
+ BINARY_FACET = "<12fH" # little endian order 12 32-bit floating-point numbers + 2-byte ("short") unsigned integer ("attribute byte count" -> use 0)
self.s.write(struct.pack(BINARY_FACET, *tl)) # append to s
- elif self.tile_info["fileformat"] == "STLa":
+ elif self.tile_info.config.fileformat == "STLa":
ASCII_FACET ="""facet normal {face[0]:f} {face[1]:f} {face[2]:f}\nouter loop\nvertex {face[3]:f} {face[4]:f} {face[5]:f}\nvertex {face[6]:f} {face[7]:f} {face[8]:f}\nvertex {face[9]:f} {face[10]:f} {face[11]:f}\nendloop\nendfacet\n"""
self.s.write(ASCII_FACET.format(face=tl))
- elif self.tile_info["fileformat"] == "obj":
+ elif self.tile_info.config.fileformat == "obj":
# add facet indices to index stream buffer
vl = [v.get_id() + 1 for v in t] # vertex list +1 b/c obj indices start at 1
self.s[1].write(f"f {vl[0]}, {vl[1]}, {vl[2]}\n")
# for STL maybe write to temp file. This can't work for obj b/c we need the full list
# of tri indices first. Once we have that, we can create a buffer/tempfile
- if self.tile_info["fileformat"] != "obj":
+ if self.tile_info.config.fileformat != "obj":
self.write_buffer_to_file()
def write_buffer_to_file(self, flush=False, chunk_size=100000):
@@ -969,26 +1063,26 @@ def write_buffer_to_file(self, flush=False, chunk_size=100000):
# for obj, write only the indices [1], vertices [0] will be done later
# Only write to file if we're actually using temp files, otherwise just bail out
- if self.tile_info.get("temp_file") is None:
+ if self.tile_info.temp_file is None:
return
if self.num_triangles % chunk_size == 0 or flush == True:
- if self.tile_info["fileformat"] == "STLb":
+ if self.tile_info.config.fileformat == "STLb":
self.fo.write(self.s.getbuffer()) # append (partial) binary buffer to file
self.s.close()
self.s = io.BytesIO()
- elif self.tile_info["fileformat"] == "STLa":
+ elif self.tile_info.config.fileformat == "STLa":
self.fo.write(self.s.getvalue()) # append (partial) text buffer to file
self.s.close()
self.s = io.StringIO()
- elif self.tile_info["fileformat"] == "obj":
+ elif self.tile_info.config.fileformat == "obj":
self.fo[1].write(self.s[1].getvalue())
self.s[1].close()
self.s[1] = io.StringIO()
if flush == True:
# close buffers (needed?)
- if self.tile_info["fileformat"] == "obj":
+ if self.tile_info.config.fileformat == "obj":
self.s[1].close()
else: # STLb and STLa
self.s.close()
@@ -1220,38 +1314,42 @@ def make_STLfile_buffer(self, ascii=False, no_bottom=False, temp_file=None):
# Convert grid into a file or memory buffer containing triangles (plus indices for obj)
def make_file_buffer(self):
+ if self.tile_info is None:
+ print("make_file_buffer: Error: self.tile_info is None")
+ return
+
# check that we have a valid triangle file format
- if self.tile_info["fileformat"] not in ["obj", "STLa", "STLb"]:
- raise ValueError(f"Invalid file format: {self.tile_info['fileformat']}. Supported formats are 'obj', 'STLa', and 'STLb'")
+ if self.tile_info.config.fileformat not in ["obj", "STLa", "STLb"]:
+ raise ValueError(f"Invalid file format: {self.tile_info.config.fileformat}. Supported formats are 'obj', 'STLa', and 'STLb'")
# get file name for temp file (or None if using memory)
- if self.tile_info.get("temp_file") != None: # contains None or a file name.
- temp_file = self.tile_info["temp_file"]
+ if self.tile_info.temp_file != None: # contains None or a file name.
+ temp_file = self.tile_info.temp_file
else:
temp_file = None # means: use memory
# Open in-memory stream buffers s
# s is used to collect the data that is eventually written into a proper file
- if self.tile_info["fileformat"] == "STLb":
+ if self.tile_info.config.fileformat == "STLb":
self.s = io.BytesIO()
mode = "ab" # for using open() later
- elif self.tile_info["fileformat"] == "STLa":
+ elif self.tile_info.config.fileformat == "STLa":
self.s = io.StringIO()
mode = "a"
- elif self.tile_info["fileformat"] == "obj":
+ elif self.tile_info.config.fileformat == "obj":
mode = "a"
# 2 buffers: vertices and indices
self.s = [io.StringIO(), io.StringIO()]
# open temp file for appending, file object self.fo will be used in create_cells()
if temp_file != None:
- if self.tile_info["fileformat"] == "STLa" or self.tile_info["fileformat"] == "STLb":
+ if self.tile_info.config.fileformat == "STLa" or self.tile_info.config.fileformat == "STLb":
try:
self.fo = open(temp_file, mode)
except Exception as e:
print("Error opening:", temp_file, e, file=sys.stderr)
return e
- elif self.tile_info["fileformat"] == "obj":
+ elif self.tile_info.config.fileformat == "obj":
# for obj we need 2 temp files and file objects, so s and fo are now lists
try:
vertsfo = open(temp_file, mode)
@@ -1269,9 +1367,9 @@ def make_file_buffer(self):
# header for STLa and obj
# (STLb header can only pre-pended later)
- if self.tile_info["fileformat"] == "STLa":
+ if self.tile_info.config.fileformat == "STLa":
self.s.write('solid digital_elevation_model\n') # digital_elevation_model is the name of the model
- elif self.tile_info["fileformat"] == "obj":
+ elif self.tile_info.config.fileformat == "obj":
self.s[0].write("g vert\n")
self.s[1].write("g tris\n")
@@ -1282,24 +1380,24 @@ def make_file_buffer(self):
add_simple_bottom = True # True by default, set to False if we can't create a 2-triangle bottom
# We don't have bottom tris but that's OK as we don't need them anyway (no_bottom option was set)
- if self.tile_info["no_bottom"] == True: add_simple_bottom = False #
+ if self.tile_info.config.no_bottom == True: add_simple_bottom = False #
# With a NaN (masked) top array, we already have the corresponding full bottom
- if self.tile_info["have_nan"] == True: add_simple_bottom = False
+ if self.tile_info.have_nan == True: add_simple_bottom = False
# with a bottom image/elevation, we also already need a full bottom
- if self.tile_info["bottom_image"] != None or self.tile_info["bottom_elevation"] != None:
+ if self.tile_info.config.bottom_image != None or self.tile_info.config.bottom_elevation != None:
add_simple_bottom = False
# obj files currently don't support simple bottoms
- #if self.tile_info["fileformat"] == 'obj': add_simple_bottom = False
+ #if self.tile_info.fileformat == 'obj': add_simple_bottom = False
# For simple bottom, add 2 triangles based on the corners of the tile
if add_simple_bottom:
- v0 = vertex(self.tile_info["W"], self.tile_info["S"], 0)
- v1 = vertex(self.tile_info["E"], self.tile_info["S"], 0)
- v2 = vertex(self.tile_info["E"], self.tile_info["N"], 0)
- v3 = vertex(self.tile_info["W"], self.tile_info["N"], 0)
+ v0 = vertex(self.tile_info.W, self.tile_info.S, 0)
+ v1 = vertex(self.tile_info.E, self.tile_info.S, 0)
+ v2 = vertex(self.tile_info.E, self.tile_info.N, 0)
+ v3 = vertex(self.tile_info.W, self.tile_info.N, 0)
t0 = (v0, v2, v1) #A
t1 = (v0, v3, v2) #B
@@ -1311,12 +1409,12 @@ def make_file_buffer(self):
if temp_file is None:
# finish STLa stream buffer
- if self.tile_info["fileformat"] == "STLa":
+ if self.tile_info.config.fileformat == "STLa":
self.s.write('endsolid digital_elevation_model') # append end clause
buf = self.s.getvalue()
# For STLb buffer, prepend the header
- if self.tile_info["fileformat"] == "STLb":
+ if self.tile_info.config.fileformat == "STLb":
BINARY_HEADER = "80sI" # up to 80 chars do NOT start with the word solid + number of faces as UINT32
stlb_header = io.BytesIO()
stlb_header.write(struct.pack(BINARY_HEADER, b'Binary STL Writer', self.num_triangles))
@@ -1325,7 +1423,7 @@ def make_file_buffer(self):
buf = stlb_header.getvalue() # CH 5/2025 changed from getbuffer to not return a memory object that can't be pickled
# fill s[0] and append s[1]
- elif self.tile_info["fileformat"] == "obj":
+ elif self.tile_info.config.fileformat == "obj":
# fill s[0] with all vertices used (keys of vertex class attribute dict)
print("Appending obj triangle indices\n", file=sys.stderr)
for vc in vertex.vertex_index_dict:
@@ -1342,12 +1440,12 @@ def make_file_buffer(self):
self.write_buffer_to_file(flush=True) # write leftover buffer to file, will NOT close fo!
# STLa: append last line
- if self.tile_info["fileformat"] == "STLa":
+ if self.tile_info.config.fileformat == "STLa":
self.fo.write('endsolid digital_elevation_model')
self.fo.close()
# for binary STL we can only now prepend a header as we didn't have num_triangles until now.
- elif self.tile_info["fileformat"] == "STLb":
+ elif self.tile_info.config.fileformat == "STLb":
# rename curent file so we can append it to the header file
self.fo.close()
body_file = temp_file + ".body"
@@ -1361,7 +1459,7 @@ def make_file_buffer(self):
# For obj, the fo[0] temp file (vertices) must be filled, then the
# .idx temp file needs to be appended to it
- elif self.tile_info["fileformat"] == "obj":
+ elif self.tile_info.config.fileformat == "obj":
# fill vertex temp file
print("Appending obj triangle indices\n", file=sys.stderr)
for vc in vertex.vertex_index_dict:
diff --git a/touchterrain/common/interpolate_Z.py b/touchterrain/common/interpolate_Z.py
new file mode 100644
index 00000000..60d8b1dd
--- /dev/null
+++ b/touchterrain/common/interpolate_Z.py
@@ -0,0 +1,251 @@
+import numpy as np
+
+# Mostly math functions generated from Gemini
+
+def sign(p1: tuple[float, ...], p2: tuple[float, ...], p3: tuple[float, ...]):
+    """Calculates twice the signed area of the triangle formed by p1, p2, and p3
+    (the 2D cross product of vectors p1->p2 and p1->p3).
+    A positive value indicates p3 is to the left of the vector p1p2.
+    A negative value indicates p3 is to the right.
+    Zero indicates p3 is collinear with p1p2.
+    """
+    # Calculate 2D cross product (determinant) of 2x2 matrix
+    return (p2[0] - p1[0]) * (p3[1] - p1[1]) - (p2[1] - p1[1]) * (p3[0] - p1[0])
+
+def is_point_in_triangle(p: tuple[float, ...], triangle: tuple[tuple[float, ...], tuple[float, ...], tuple[float, ...]]):
+    """Checks if a point p is inside the triangle defined by the tuple.
+    Works for either winding order of the triangle; points exactly on an
+    edge or vertex count as inside (their sign() is zero).
+    """
+    s1 = sign(triangle[0], triangle[1], p)
+    s2 = sign(triangle[1], triangle[2], p)
+    s3 = sign(triangle[2], triangle[0], p)
+
+    # All signs must be the same (or zero for points on edges)
+    # to be inside the triangle.
+    has_neg = (s1 < 0) or (s2 < 0) or (s3 < 0)
+    has_pos = (s1 > 0) or (s2 > 0) or (s3 > 0)
+
+    return not (has_neg and has_pos)
+
+def interpolate_Z_on_3_point_plane(p1, p2, p3, p_new):
+    """
+    Interpolates the Z-value of a new point (x_new, y_new)
+    on the plane defined by three non-collinear points.
+
+    Args:
+        p1 (tuple/list/np.array): (x1, y1, z1) of the first point.
+        p2 (tuple/list/np.array): (x2, y2, z2) of the second point.
+        p3 (tuple/list/np.array): (x3, y3, z3) of the third point.
+        p_new (tuple/list/np.array): (x_new, y_new) of the point to interpolate.
+
+    Returns:
+        float: The interpolated Z-value (z_new).
+
+    Raises:
+        ValueError: If the three points are collinear or identical and
+            therefore do not define a unique plane.
+    """
+
+    # 1. Separate coordinates (x, y) and values (z)
+    x1, y1, z1 = p1
+    x2, y2, z2 = p2
+    x3, y3, z3 = p3
+    x_new, y_new = p_new
+
+    # 2. Define the matrix M (the coordinate matrix) and the vector Z
+    # M represents the system of equations: M * [A, B, C] = Z
+
+    # M = | x1  y1  1 |
+    #     | x2  y2  1 |
+    #     | x3  y3  1 |
+    M = np.array([
+        [x1, y1, 1],
+        [x2, y2, 1],
+        [x3, y3, 1]
+    ])
+
+    # Z = | z1 |
+    #     | z2 |
+    #     | z3 |
+    Z = np.array([z1, z2, z3])
+
+    # 3. Solve for the coefficients [A, B, C]
+    # np.linalg.solve(M, Z) finds the solution vector X such that M @ X = Z
+    try:
+        coefficients = np.linalg.solve(M, Z)
+    except np.linalg.LinAlgError:
+        # This occurs if the determinant is zero (points are collinear or identical)
+        raise ValueError("Error: The three points are collinear or identical and do not define a unique plane.")
+
+    A, B, C = coefficients
+
+    # 4. Interpolate the new point using the plane equation: z_new = A*x_new + B*y_new + C
+    z_new = A * x_new + B * y_new + C
+
+    # Optional: Print the plane equation for reference
+    # print(f"Plane Equation: Z = {A:.4f} * X + {B:.4f} * Y + {C:.4f}")
+
+    return z_new
+
+# # --- Example Usage ---
+# # Known points (x, y, z)
+# point1 = (1.0, 1.0, 5.0) # P1
+# point2 = (5.0, 1.0, 10.0) # P2
+# point3 = (3.0, 5.0, 7.0) # P3
+
+# # New point (x, y) to interpolate
+# new_point = (3.0, 3.0)
+
+# # Perform the interpolation
+# z_interpolated = interpolate_Z_on_3_point_plane(point1, point2, point3, new_point)
+
+# if z_interpolated is not None:
+# print(f"\nCoordinates of the new point: ({new_point[0]}, {new_point[1]})")
+# print(f"The interpolated Z-value is: {z_interpolated:.4f}")
+
+# # Expected output for this example is 7.5
+
+import shapely
+import numpy as np
+from typing import List, Union
+
+def interpolate_z_planar(
+    geometry_2d: shapely.Geometry,
+    planes_3d: List[shapely.geometry.Polygon]
+) -> shapely.Geometry:
+    """
+    Interpolates the Z-dimension onto a 2D Shapely geometry based on
+    containment within a list of 3D Shapely polygons. The Z value is
+    interpolated using the plane equation defined by the first three
+    vertices of the containing 3D polygon. Points outside every polygon
+    fall back to the polygon whose boundary is nearest.
+
+    Args:
+        geometry_2d: The 2D Shapely geometry (Point, LineString, Polygon, etc.).
+        planes_3d: A list of 3D Shapely Polygon objects (each a closed triangle:
+            4 coords with [0] == [3]).
+
+    Returns:
+        The new 3D Shapely geometry (same type as input).
+
+    Raises:
+        ValueError: If any point in the 2D geometry is not contained, if
+            polygons are not 3D, or if the first three points are collinear.
+        TypeError: If the geometry type is not supported.
+    """
+
+    # --- Helper function to get the interpolated Z value ---
+    def get_interpolated_z(point_2d: shapely.geometry.Point) -> float:
+        """Finds the containing 3D polygon and returns the interpolated Z."""
+
+        interpolating_poly: shapely.Polygon | None = None
+
+        for poly_3d in planes_3d:
+            # Check for 2D containment or intersection along an exterior edge (ignores Z)
+            # NOTE(review): no early break here, so when several polygons overlap the
+            # point the LAST intersecting polygon in the list wins — confirm this is
+            # intended before adding a break for speed.
+            if point_2d.intersects(poly_3d):
+                interpolating_poly = poly_3d
+
+
+        # If loop completes without getting an intersecting polygon, the point is outside all polygons
+
+        # Pick the plane with the closest exterior or interior ring to interpolate off of
+        if interpolating_poly is None:
+            distance_to_closest_poly_boundary = float('inf')
+            for poly_3d in planes_3d:
+                distance_to_poly_boundary = poly_3d.boundary.distance(point_2d)
+                if distance_to_poly_boundary < distance_to_closest_poly_boundary:
+                    distance_to_closest_poly_boundary = distance_to_poly_boundary
+                    interpolating_poly = poly_3d
+
+        # Still None is only possible when planes_3d is empty
+        if interpolating_poly is None:
+            raise ValueError(f"Point {point_2d.wkt} is not within any of the provided 3D polygons and could not get distance to polygon boundaries.")
+
+        coords_3d = interpolating_poly.exterior.coords
+
+        # Check if polygon is 3 unique points (4th point should close the polygon)
+        if len(coords_3d) != 4 or len(coords_3d[0]) < 3:
+            raise ValueError(
+                f"Polygon {planes_3d.index(interpolating_poly)} must have 3 vertices and be 3D."
+            )
+
+        # Use the first three unique vertices to define the plane
+        P1 = np.array(coords_3d[0][:3])
+        P2 = np.array(coords_3d[1][:3])
+        P3 = np.array(coords_3d[2][:3])
+
+        # Calculate two vectors lying on the plane
+        V1 = P2 - P1
+        V2 = P3 - P1
+
+        # Calculate the Normal Vector (A, B, C) using the cross product
+        Normal_Vector = np.cross(V1, V2)
+        A, B, C = Normal_Vector[0], Normal_Vector[1], Normal_Vector[2]
+
+        # Check for near-zero C (This means the plane is near-vertical)
+        if abs(C) < 1e-6:
+            raise ValueError(
+                "The plane defined by the first three points is near-vertical or collinear. Cannot uniquely solve for Z."
+            )
+
+        # Plane Equation: A(x - x0) + B(y - y0) + C(z - z0) = 0
+        # Solving for z:
+        # z = z0 - (A/C)*(x - x0) - (B/C)*(y - y0)
+
+        x_p, y_p = point_2d.x, point_2d.y
+        x0, y0, z0 = P1[0], P1[1], P1[2]
+
+        new_z = z0 - (A / C) * (x_p - x0) - (B / C) * (y_p - y0)
+
+        return float(new_z)
+
+
+    # --- Function to handle the interpolation based on geometry type ---
+    if isinstance(geometry_2d, shapely.geometry.Point):
+        # 1. Handle Point
+        new_z = get_interpolated_z(geometry_2d)
+        new_coords = (geometry_2d.x, geometry_2d.y, new_z)
+        return shapely.geometry.Point(new_coords)
+
+    elif isinstance(geometry_2d, (shapely.geometry.LineString, shapely.geometry.LinearRing)):
+        # 2. Handle LineString or LinearRing
+        new_coords = []
+        for x, y in geometry_2d.coords:
+            point_2d = shapely.geometry.Point(x, y)
+            new_z = get_interpolated_z(point_2d)
+            new_coords.append((x, y, new_z))
+
+        if isinstance(geometry_2d, shapely.geometry.LineString):
+            return shapely.geometry.LineString(new_coords)
+        else: # LinearRing
+            return shapely.geometry.LinearRing(new_coords)
+
+    elif isinstance(geometry_2d, shapely.geometry.Polygon):
+        # 3. Handle Polygon
+
+        # A. Process Exterior Ring
+        exterior_coords_3d = []
+        for x, y in geometry_2d.exterior.coords:
+            point_2d = shapely.geometry.Point(x, y)
+            new_z = get_interpolated_z(point_2d)
+            exterior_coords_3d.append((x, y, new_z))
+
+        # B. Process Interior Rings (Holes)
+        interior_coords_3d_list = []
+        for interior_ring in geometry_2d.interiors:
+            hole_coords_3d = []
+            for x, y in interior_ring.coords:
+                point_2d = shapely.geometry.Point(x, y)
+                new_z = get_interpolated_z(point_2d)
+                hole_coords_3d.append((x, y, new_z))
+            interior_coords_3d_list.append(hole_coords_3d)
+
+        # Recreate the 3D Polygon
+        return shapely.geometry.Polygon(exterior_coords_3d, interior_coords_3d_list)
+
+    elif isinstance(geometry_2d, shapely.geometry.MultiPolygon):
+        # 4. Handle MultiPolygon (NEW)
+
+        new_polygons = []
+        # Iterate over the constituent Polygons in the MultiPolygon
+        for polygon_2d in geometry_2d.geoms:
+            # Recursively call the function for each Polygon
+            # The result is a 3D Polygon
+            polygon_3d = interpolate_z_planar(polygon_2d, planes_3d)
+            new_polygons.append(polygon_3d)
+
+        # Combine the new 3D Polygons into a 3D MultiPolygon
+        return shapely.geometry.MultiPolygon(new_polygons)
+
+    else:
+        # 5. Handle unsupported types
+        raise TypeError(f"Unsupported geometry type: {type(geometry_2d).__name__}")
\ No newline at end of file
diff --git a/touchterrain/common/polygon_clipping.py b/touchterrain/common/polygon_clipping.py
new file mode 100644
index 00000000..378c6fac
--- /dev/null
+++ b/touchterrain/common/polygon_clipping.py
@@ -0,0 +1,592 @@
+import geopandas
+import numpy
+import shapely
+import os
+import multiprocessing
+import functools
+from shapely.ops import unary_union
+import logging
+
+# try to import gdal from multiple sources
+try:
+ import gdal
+except ImportError as err:
+ from osgeo import gdal
+
+from touchterrain.common.BorderEdge import BorderEdge
+from touchterrain.common.RasterVariants import RasterVariants
+from touchterrain.common.user_config import TouchTerrainConfig
+from touchterrain.common.utils import geoCoordToPrint2DCoord, arrayCellCoordToQuadPrint2DCoords
+from touchterrain.common.shapely_utils import flatten_geometries, flatten_geometries_borderEdge, sort_line_segment_based_contains
+from touchterrain.common.shapely_plot import plot_intersection_of_shapely_polygons
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
+
+def _log_info(msg: str) -> None:
+    """Send logging to configured handlers or stdout when none are present (spawned workers).
+
+    :param msg: Message to emit.
+    """
+    # A freshly spawned worker process has a module logger with no handlers
+    # attached, so fall back to plain print() there.
+    if logger.hasHandlers():
+        logger.info(msg)
+    else:
+        print(msg)
+
+def _process_polygon_clip_cell(
+    i: int,
+    j: int,
+    clipping_print2d_polys: list[shapely.Polygon],
+    cell_size_mm: float,
+    tile_y_shape: int,
+) -> tuple[bool, list[shapely.Geometry], dict[str, list[BorderEdge]] | None, bool]:
+    """Collect clipping results based on intersection between a single cell and clipping poly.
+
+    :param i: Cell column index in the raster grid.
+    :param j: Cell row index in the raster grid.
+    :param clipping_print2d_polys: Clipping polygons in print 2D coordinates.
+    :param cell_size_mm: Cell edge length in print space (mm).
+    :param tile_y_shape: Tile row count (presumably used to flip y into print
+        coordinates — confirm in arrayCellCoordToQuadPrint2DCoords).
+    :return: (cell is disjoint from ALL clipping polys, flattened intersection
+        geometries, intersection edges bucketed by cardinal cell edge or None,
+        cell is properly contained by at least one clipping poly)
+    """
+    quadPrint2DCoords = arrayCellCoordToQuadPrint2DCoords(
+        array_coord_2D=(i, j),
+        cell_size=cell_size_mm,
+        tile_y_shape=tile_y_shape,
+    )
+
+    # Disjoint stays True only if the cell is disjoint from EVERY clipping polygon
+    cell_disjoint = True
+    cell_polygon_intersection_geometry = []
+    cell_polygon_intersection_edge_buckets = None
+    cell_contains_properly = False
+
+    for clippingPrint2DPoly in clipping_print2d_polys:
+
+        # Debug plot of a clipping and cell polygon intersection
+        # if j==1 and i==6:
+        #     quadPrint2DPoly = shapely.Polygon(quadPrint2DCoords)
+        #     plot_intersection_of_shapely_polygons([clippingPrint2DPoly, quadPrint2DPoly])
+        #     pass
+
+        disjoint, intersection_geoms, intersection_edges, contains_properly = find_intersection_geometries(
+            clippingPrint2DPoly=clippingPrint2DPoly,
+            quadPrint2DCoords=quadPrint2DCoords,
+        )
+
+        # Mark cell as not disjoint if needed
+        cell_disjoint &= disjoint
+
+        # Add to cell's "all intersection geometries flattened to polygons"
+        if intersection_geoms:
+            cell_polygon_intersection_geometry.extend(intersection_geoms)
+
+        # Add to cell's "all intersection geometries flattened to single edges and sorted into buckets in a dict"
+        if intersection_edges:
+            if cell_polygon_intersection_edge_buckets is None:
+                cell_polygon_intersection_edge_buckets = {'N': [], 'W': [], 'S': [], 'E': [], 'other': []}
+            for k, v in intersection_edges.items():
+                cell_polygon_intersection_edge_buckets[k].extend(v)
+
+        # Set cell's polygon_intersection_contains_properly
+        if contains_properly:
+            cell_contains_properly = True
+
+    return cell_disjoint, cell_polygon_intersection_geometry, cell_polygon_intersection_edge_buckets, cell_contains_properly
+
+def _process_polygon_clip_rows(
+    row_range: tuple[int, int],
+    clipping_print2d_polys: list[shapely.Polygon],
+    cell_size_mm: float,
+    tile_y_shape: int,
+    grid_width: int,
+) -> tuple[
+    list[tuple[int, int]],
+    list[tuple[int, int, list[shapely.Geometry]]],
+    list[tuple[int, int, dict[str, list[BorderEdge]]]],
+    list[tuple[int, int]],
+]:
+    """Get cell and clipping poly intersection results for a range of rows.
+    Worker can run this to process a chunk of rows. Returns lists of update info to apply in main process.
+
+    :param row_range: Half-open row interval [start_row, end_row) to process.
+    :param grid_width: Number of cells per row (i iterates 0..grid_width-1).
+    :return: (disjoint cell locations, per-cell intersection geometry updates,
+        per-cell edge-bucket updates, contains-properly cell locations);
+        every entry is keyed by (j, i) = (row, column).
+    """
+    start_row, end_row = row_range
+    _log_info(f"Polygon clipping starting rows [{start_row}, {end_row})")
+    disjoint_cells = []
+    polygon_intersection_geometry_updates = []
+    polygon_intersection_edge_buckets_updates = []
+    polygon_intersection_contains_properly_updates = []
+
+    for j in range(start_row, end_row):
+        for i in range(grid_width):
+            cell_disjoint, cell_intersection_geoms, cell_edge_buckets, cell_contains_properly = _process_polygon_clip_cell(
+                i=i,
+                j=j,
+                clipping_print2d_polys=clipping_print2d_polys,
+                cell_size_mm=cell_size_mm,
+                tile_y_shape=tile_y_shape,
+            )
+
+            if cell_disjoint:
+                disjoint_cells.append((j, i))
+            if cell_intersection_geoms:
+                polygon_intersection_geometry_updates.append((j, i, cell_intersection_geoms))
+            # Skip all-empty bucket dicts to keep the returned update payload small
+            if cell_edge_buckets and any(len(v) > 0 for v in cell_edge_buckets.values()):
+                polygon_intersection_edge_buckets_updates.append((j, i, cell_edge_buckets))
+            if cell_contains_properly:
+                polygon_intersection_contains_properly_updates.append((j, i))
+
+    _log_info(f"Polygon clipping finished rows [{start_row}, {end_row})")
+    return disjoint_cells, polygon_intersection_geometry_updates, polygon_intersection_edge_buckets_updates, polygon_intersection_contains_properly_updates
+
+def _apply_polygon_clip_updates(
+    surface_raster_variant: list[RasterVariants],
+    top_hint: numpy.ndarray | None,
+    updates: tuple[
+        list[tuple[int, int]],
+        list[tuple[int, int, list[shapely.Geometry]]],
+        list[tuple[int, int, dict[str, list[BorderEdge]]]],
+        list[tuple[int, int]],
+    ],
+) -> None:
+    """Apply passed updates to the first RasterVariant.
+
+    Disjoint cells are set to NaN in ALL raster variants (and in top_hint when
+    given); geometry/edge/contains-properly results are stored only on the
+    first variant. All arguments are mutated in place.
+
+    :param surface_raster_variant: Raster variants to update.
+    :param top_hint: Optional companion array; disjoint cells are NaN-ed here too.
+    :param updates: Updates to the RasterVariant as a tuple of lists. Each list contains tuples with the cell location to update and any update data. Lists are in order of disjoint cells, polygon_intersection_geometry, polygon_intersection_edge_buckets, polygon_intersection_contains_properly
+    """
+    disjoint_cells, polygon_intersection_geometry_updates, polygon_intersection_edge_buckets_updates, polygon_intersection_contains_properly_updates = updates
+
+    for j, i in disjoint_cells:
+        for rv in surface_raster_variant:
+            rv.set_location_in_variants(location=(j, i), new_value=numpy.nan, set_edge_interpolation=False)
+        if top_hint is not None:
+            top_hint[j][i] = numpy.nan
+
+    for j, i, geoms in polygon_intersection_geometry_updates:
+        surface_raster_variant[0].polygon_intersection_geometry[j][i] = geoms
+
+    for j, i, edges in polygon_intersection_edge_buckets_updates:
+        surface_raster_variant[0].polygon_intersection_edge_buckets[j][i] = edges
+
+    for j, i in polygon_intersection_contains_properly_updates:
+        surface_raster_variant[0].polygon_intersection_contains_properly[j][i] = True
+
+def find_intersection_geometries(clippingPrint2DPoly: shapely.Polygon, quadPrint2DCoords: list[tuple[float, float]]) -> tuple[bool, list[shapely.Geometry] | None, dict[str, list[BorderEdge]] | None, bool]:
+ """Check if clipping polygon and cell polygon have no/partial/complete overlap. Return whether to set the cell to NaN, intersection polygons, intersection edges.
+
+ Returned intersection edges are a flat list of all edges making up the intersection polygons sorted into buckets depending on them lying on a specific cardinal edge or not.
+
+ :param clippingPrint2DPoly: Clipping polygon in print 2D coordinates
+ :type clippingPrint2DPoly: shapely.Polygon
+ :param quadPrint2DCoords: Cell quad vertices in print 2D coordinates
+ :type quadPrint2DCoords: list[tuple[float, float]]
+ :return: (Should set raster locations to NaN, polygon_intersection_geometry, polygon_intersection_edge_buckets, polygon_intersection_contains_properly)
+ """
+ # TODO: use shapely.box for optimization?
+ quadPrint2DPoly = shapely.Polygon(quadPrint2DCoords)
+
+ polygon_intersection_contains_properly = False
+ if clippingPrint2DPoly.contains_properly(quadPrint2DPoly): # quad is entirely inside polygon
+ # We check if quad is entirely inside border poly with `contains_properly` instead using `contains` due to possible shared edges and points between quad and poly because a shared edge could have a neighboring cell with a partial intersection that does NOT contain the shared edge. i.e. There is a gap between the neighbor cell's intersection polygon and the shared edge. This neighboring cell will have a non-NaN value that does not work with our normal way of checking for wall existence on cells with full normal quads.
+ # leave the cell unchanged
+ polygon_intersection_contains_properly = True
+
+ if clippingPrint2DPoly.disjoint(quadPrint2DPoly): # quad is entirely not in polygon
+ # set the all variants to NaN in that location
+ #surface_raster_variant.set_location_in_variants(location=(j,i), new_value=numpy.nan, set_edge_interpolation=False)
+ return (True, None, None, polygon_intersection_contains_properly) # the edge interpolation raster should not be changed and set to NaN
+ else: # quad is partially inside poly or shares an edge/point
+ intersection_geometry = clippingPrint2DPoly.intersection(quadPrint2DPoly)
+
+ # Get flat list of all intersecting geometries excluding point geometries. Point geometries do not matter for wall generation. If an intersection only has points, we treat the cell like there were no intersections.
+ flat_intersection_geometries = flatten_geometries([intersection_geometry])
+ if len(flat_intersection_geometries) == 0:
+ print("find_intersection_geometries: only point intersection geometries found")
+ #surface_raster_variant.polygon_intersection_geometry[j][i] = flat_intersection_geometries
+ pass
+
+ #intersection geometry as a list of single line segments
+ flat_intersection_borderEdges = flatten_geometries_borderEdge([intersection_geometry])
+
+ #expect quad print2D vertices in CCW order NW SW SE NE
+ #quad 2D edges in CCW order N W S E
+ quadPrint2DNorthEdge = shapely.LineString([list(quadPrint2DCoords[3]),list(quadPrint2DCoords[0])])
+ quadPrint2DWestEdge = shapely.LineString([list(quadPrint2DCoords[0]),list(quadPrint2DCoords[1])])
+ quadPrint2DSouthEdge = shapely.LineString([list(quadPrint2DCoords[1]),list(quadPrint2DCoords[2])])
+ quadPrint2DEastEdge = shapely.LineString([list(quadPrint2DCoords[2]),list(quadPrint2DCoords[3])])
+
+ # sort every lines into buckets based on if the quad edge contains them
+ intersection_edge_buckets = {
+ 'N': [],
+ 'W': [],
+ 'S': [],
+ 'E': [],
+ 'other': [],}
+ for be in flat_intersection_borderEdges:
+ bucket_key = sort_line_segment_based_contains(line_segment=be, north=quadPrint2DNorthEdge, west=quadPrint2DWestEdge, south=quadPrint2DSouthEdge, east=quadPrint2DEastEdge)
+
+ # mark line segment as generating a wall if it is not along a quad edge
+ if bucket_key[1] == False:
+ be.make_wall = True
+
+ if bucket_key[0] not in intersection_edge_buckets:
+ print(f'Unknown bucket key {bucket_key}')
+ intersection_edge_buckets[bucket_key[0]].append(be)
+
+ return (False, flat_intersection_geometries, intersection_edge_buckets, polygon_intersection_contains_properly)
+
+# def find_cell_and_clipping_poly_intersection(surface_raster_variant: list[RasterVariants], cellLocation: tuple[int, int], clippingPrint2DPoly: shapely.Polygon, quadPrint2DCoords: list[tuple[float, float]], top_hint: numpy.ndarray|None) -> bool:
+# """
+# :return: Should set cell value to NaN
+# :rtype: bool
+# """
+
+# if cellLocation[0] == 124 and cellLocation[1] == 0:
+# pass
+
+# intersection_geometries_result = find_intersection_geometries(clippingPrint2DPoly=clippingPrint2DPoly, quadPrint2DCoords=quadPrint2DCoords)
+
+# # Debug: stop here to inspect intersection geometry result for a polygon and the cell
+# if cellLocation[0] == 1 and cellLocation[1] == 6:
+# pass
+
+# # Should set cell values to NaN # we set the cell values to NaN outside this function after evaluating all clipping polygons in case there are multiple polygons
+# # if intersection_geometries_result[0]:
+# # #surface_raster_variant.set_location_in_variants(location=cellLocation, new_value=numpy.nan, set_edge_interpolation=False)
+# # for rv in surface_raster_variant: # Set the location to NaN in all raster variants (top and bottom) to get the same interpolation values between normal/difference modes
+# # rv.set_location_in_variants(location=cellLocation, new_value=numpy.nan, set_edge_interpolation=False)
+# # if top_hint is not None:
+# # top_hint[cellLocation[0]][cellLocation[1]] = numpy.nan
+
+# # Add to cell's"all intersection geometries flattened to polygons"
+# if intersection_geometries_result[1] is not None:
+# if surface_raster_variant[0].polygon_intersection_geometry[cellLocation[0]][cellLocation[1]] is None:
+# surface_raster_variant[0].polygon_intersection_geometry[cellLocation[0]][cellLocation[1]] = []
+# surface_raster_variant[0].polygon_intersection_geometry[cellLocation[0]][cellLocation[1]].extend(intersection_geometries_result[1])
+
+
+# # Add to cell's "all intersection geometries flattened to single edges and sorted into buckets in a dict"
+# if intersection_geometries_result[2] is not None:
+# if surface_raster_variant[0].polygon_intersection_edge_buckets[cellLocation[0]][cellLocation[1]] is None:
+# surface_raster_variant[0].polygon_intersection_edge_buckets[cellLocation[0]][cellLocation[1]] = {}
+# # merge bucket dictionaries and lists
+# for k, v in intersection_geometries_result[2].items():
+# if k in surface_raster_variant[0].polygon_intersection_edge_buckets[cellLocation[0]][cellLocation[1]]:
+# surface_raster_variant[0].polygon_intersection_edge_buckets[cellLocation[0]][cellLocation[1]][k].extend(v)
+# else:
+# surface_raster_variant[0].polygon_intersection_edge_buckets[cellLocation[0]][cellLocation[1]][k] = v
+
+# # Set cell's polygon_intersection_contains_properly
+# if intersection_geometries_result[3]:
+# surface_raster_variant[0].polygon_intersection_contains_properly[cellLocation] = intersection_geometries_result[3]
+
+# return intersection_geometries_result[0]
+
+def find_polygon_clipping_edges(config: TouchTerrainConfig, dem: gdal.Dataset, surface_raster_variant: list[RasterVariants], top_hint: numpy.ndarray|None, print3D_resolution_mm: float):
+ """Find the intersection polygon between each raster cell and the clipping polygon. Sort all individual edges of intersection polygons into buckets stored in RasterVariants based on if the edge lies on a cardinal direction edge of the cell quad. Marks all interior edges as needing walls created.
+
+ Use the first RasterVariant in the list for calculations. Propagate any "set to NaN" changes to any other RasterVariants
+ """
+ if config.edge_clipping_polygon == None:
+ print('find_polygon_clipping_edges: config.edge_fit_polygon_file not defined!')
+ return
+ if config.tileScale == None:
+ print('find_polygon_clipping_edges: config.tileScale not defined!')
+ return
+
+ # if len(surface_raster_variant) == 0:
+ # raise ValueError("list of RasterVariant had no objects")
+
+ # Read the GeoPackage into a GeoDataFrame
+ polygon_boundary_gdf = geopandas.read_file(config.edge_clipping_polygon)
+
+ # reproject vector boundary to same projected CRS as raster
+ polygon_boundary_gdf = polygon_boundary_gdf.to_crs(dem.GetProjectionRef())
+
+ # Initialize an empty list to store boundary Shapely Polygon objects
+ shapely_polygons: list[shapely.Polygon] = []
+
+ # Iterate through the GeoDataFrame and extract polygon geometries
+ for index, row in polygon_boundary_gdf.iterrows():
+ geometry = row.geometry
+ # Check if the geometry is a Polygon or MultiPolygon
+ if isinstance(geometry, shapely.Polygon):
+ shapely_polygons.append(geometry)
+ elif geometry.geom_type == 'MultiPolygon':
+ # If it's a MultiPolygon, iterate through its individual polygons
+ for poly in geometry.geoms:
+ shapely_polygons.append(poly)
+ else:
+ print('unhandled geometry type when flattening clipping polygon file into polygon')
+
+ # Now, 'shapely_polygons' contains a list of boundary Shapely Polygon objects
+ if shapely_polygons:
+ print(f"Found {len(shapely_polygons)} polygons in the GeoPackage.")
+ for idx, poly in enumerate(shapely_polygons, start=1):
+ print(f"Polygon {idx} area: {poly.area}")
+ else:
+ print("No polygons found in the GeoPackage or the specified layer.")
+
+ ulx, pixelwidthx, xskew, uly, yskew, pixelheighty = dem.GetGeoTransform()
+ ncol = dem.RasterXSize
+ nrow = dem.RasterYSize
+ # Calculate lower-right corner coordinates
+ lrx = ulx + (ncol * pixelwidthx) + (nrow * xskew)
+ lry = uly + (ncol * yskew) + (nrow * pixelheighty)
+
+ # Create clipping_intersection_geometry and polygon_intersection_lines_buckets for the first time
+ if surface_raster_variant[0].original is None:
+ print('find_polygon_clipping_edges: original variant is None')
+ return
+ surface_raster_variant[0].polygon_intersection_geometry = numpy.empty(surface_raster_variant[0].original.shape, dtype=object)
+ surface_raster_variant[0].polygon_intersection_edge_buckets = numpy.empty(surface_raster_variant[0].original.shape, dtype=object)
+ surface_raster_variant[0].polygon_intersection_contains_properly = numpy.zeros(surface_raster_variant[0].original.shape, dtype=bool)
+
+ # Precompute clipping polygons in print2D coordinates once (same for all cells)
+ clipping_print2d_polys: list[shapely.Polygon] = []
+ for clippingGeoPoly in shapely_polygons:
+ clippingPrint2DPoly = geoCoordToPrint2DCoord(
+ geoCoord2D=clippingGeoPoly,
+ scale=config.tileScale,
+ geoXMin=ulx,
+ geoYMin=lry,
+ )
+ if isinstance(clippingPrint2DPoly, shapely.Polygon):
+ clipping_print2d_polys.append(clippingPrint2DPoly)
+ elif hasattr(clippingPrint2DPoly, "geoms"):
+ for geom in clippingPrint2DPoly.geoms:
+ if isinstance(geom, shapely.Polygon):
+ clipping_print2d_polys.append(geom)
+ else:
+ print("clippingPrint2DPoly is not a shapely Polygon")
+
+ # determine intersection for polygon(s) in boundary and each cell quad
+ rows = surface_raster_variant[0].original.shape[0]
+ cols = surface_raster_variant[0].original.shape[1]
+
+ # Decide whether to use multiprocessing for clipping
+ use_mp = config.CPU_cores_to_use not in (None, 1)
+ worker_fn = functools.partial(
+ _process_polygon_clip_rows,
+ clipping_print2d_polys=clipping_print2d_polys,
+ cell_size_mm=print3D_resolution_mm,
+ tile_y_shape=rows,
+ grid_width=cols,
+ )
+
+ if not use_mp:
+ updates = worker_fn((0, rows))
+ _apply_polygon_clip_updates(surface_raster_variant, top_hint, updates)
+ else:
+ available_cores = os.cpu_count()-1 or 1
+ requested_cores = available_cores if config.CPU_cores_to_use == 0 else config.CPU_cores_to_use
+ worker_cores = max(1, min(requested_cores, rows))
+ _log_info(f"Computing cell and clipping polygon with {worker_cores} workers")
+
+ base_rows_per_worker = rows // worker_cores
+ extra_rows = rows % worker_cores
+ row_ranges = []
+ start_row = 0
+ for worker_idx in range(worker_cores):
+ rows_for_worker = base_rows_per_worker + (1 if worker_idx < extra_rows else 0)
+ end_row = start_row + rows_for_worker
+ row_ranges.append((start_row, end_row))
+ start_row = end_row
+
+ mp = multiprocessing.get_context('spawn')
+ pool = mp.Pool(
+ processes=worker_cores,
+ maxtasksperchild=1,
+ )
+ try:
+ for updates in pool.imap_unordered(worker_fn, row_ranges):
+ _apply_polygon_clip_updates(surface_raster_variant, top_hint, updates)
+ finally:
+ pool.close()
+ pool.terminate()
+
+def mark_overlapping_edges_for_walls(cell_1_edges: list[BorderEdge], cell_2_edges: list[BorderEdge]):
+    """Mark overlapping edges between a cell and neighbor cell to make a wall. Sets the make_wall property of only the cell with the Polygon side of a match.
+
+    Both lists are mutated in place: matched edges are flagged
+    skip_future_eval_for_walls, containing edges are replaced by their split
+    sub-edges (appended at the list end), and make_wall is set on the
+    polygon-line side of each L<>PL match.
+
+    :param cell_1_edges: The target cell
+    :type cell_1_edges: list[BorderEdge]
+    :param cell_2_edges: The neighbor cell
+    :type cell_2_edges: list[BorderEdge]
+    """
+
+    # Algorithm outline:
+    # check if cell 1 edge contains cell 2 edge or if cell 2 edge contains cell 1 edge
+
+    # split the containing edge by the contained edge
+
+    # mark the contained edge and matching split containing edge sub-edge as skip_future_eval_for_walls to skip in future loops. Check if wall is needed based on if matched edge from a cell is a polygon_line and matched edge from other cell is NOT a polygon_line. L<>PL = make wall. L<>L or PL<>PL = no wall. Mark whichever of these 2 edges is on the PL side as make_wall.
+
+    # delete the containing edge from the list, add the new split edges to the list end
+
+    # if containing edge was on cell 1, do not increment iterator
+
+    # if cell 1 edge is same as cell 2 edge, make wall on P side, mark the edges as skip
+
+    # all edges on cell 1 and 2 should match with an edge on other cell at the end of the loop. i.e. all edges on the shared side of both cells should be marked as skip at the very end
+
+    c1eIdx = 0
+    # while-loops (not for) because both lists are mutated during iteration.
+    while c1eIdx < len(cell_1_edges):
+        c1e = cell_1_edges[c1eIdx]
+        if c1e.skip_future_eval_for_walls:
+            c1eIdx += 1
+            continue
+        c2eIdx = 0
+        while c2eIdx < len(cell_2_edges):
+            c2e = cell_2_edges[c2eIdx]
+            if c2e.skip_future_eval_for_walls:
+                c2eIdx += 1
+                continue
+            make_wall = c1e.polygon_line != c2e.polygon_line # Should we make a wall on matching edges? L<>L and PL<>PL have no wall
+            # Sentinel edge: if no wall is needed, wall_ce stays this throwaway
+            # object so neither real edge gets marked by the identity checks below.
+            wall_ce = BorderEdge(geometry=shapely.LineString())
+            if make_wall: # mark wall on the P side
+                if c1e.polygon_line:
+                    wall_ce = c1e
+                elif c2e.polygon_line:
+                    wall_ce = c2e
+
+            containingEdgeList: list[BorderEdge] = []
+            containingEdgeIdx: int = -1
+            splitter: BorderEdge | None = None
+
+            if c1e.geometry.equals(c2e.geometry):
+                # Exact geometric match: mark the polygon side (if any) and retire both edges.
+                wall_ce.make_wall = make_wall
+                c1e.skip_future_eval_for_walls = True
+                c2e.skip_future_eval_for_walls = True
+            elif c1e.geometry.contains(c2e.geometry):
+                containingEdgeList = cell_1_edges
+                containingEdgeIdx = c1eIdx
+                splitter = c2e
+            elif c2e.geometry.contains(c1e.geometry):
+                containingEdgeList = cell_2_edges
+                containingEdgeIdx = c2eIdx
+                splitter = c1e
+            # If edges are not equal but overlap each other, split edges by each other to get sub edges
+            if splitter: # check for side effect of contains() == True
+                # NOTE(review): assumes unary_union of the two overlapping LineStrings
+                # yields a MultiLineString noded at the splitter's endpoints — confirm
+                # for collinear partial overlaps.
+                sub_edges = unary_union([c1e.geometry, c2e.geometry])
+                splitter.skip_future_eval_for_walls = True
+                # NOTE(review): this overwrites any make_wall previously set on the
+                # splitter edge by an earlier match — verify that is intended.
+                splitter.make_wall = splitter is wall_ce
+                for segment in sub_edges.geoms:
+                    # The sub-edge that coincides with the splitter inherits its matched
+                    # state; the remaining sub-edges stay eligible for future matching.
+                    is_matching_splitter = segment.equals(splitter.geometry)
+                    segment_make_wall = is_matching_splitter and containingEdgeList[containingEdgeIdx].polygon_line and make_wall
+                    containingEdgeList.append(BorderEdge(
+                        geometry=segment,
+                        polygon_line=containingEdgeList[containingEdgeIdx].polygon_line,
+                        skip_future_eval_for_walls=is_matching_splitter,
+                        make_wall=segment_make_wall
+                    ))
+                del containingEdgeList[containingEdgeIdx] #remove current evaluated cell 1 edge because it has been replaced by the sub edges
+                if containingEdgeList is cell_1_edges:
+                    # Move onto the next cell 1 edge because we matched and split c1 edge. Do not increment cell 1 iterator because we removed the cell 1 edge we just evaluated
+                    c1eIdx -= 1 # balance out c1 iterator increment that happens after c2 loop ends
+                    break
+                elif containingEdgeList is cell_2_edges:
+                    # Move onto the next cell 2 edge because we matched and split a c2 edge. Skip incrementing cell 2 iterator because we removed the cell 2 edge we just evaluated
+                    continue
+
+            c2eIdx += 1
+        c1eIdx += 1
+
+def mark_shared_edges_of_cell_for_walls(polygon_intersection_edge_buckets: numpy.ndarray, elevation_raster: numpy.ndarray, cell_location: tuple[int, int], direction: tuple[int, int]):
+    """Mark shared edges of a cell and the neighbor cell in the specified direction to have a wall if the edges overlap.
+
+    Cases handled (NaN in elevation_raster means "no mesh will be generated"):
+      * cell 1 has mesh, neighbor has mesh  -> compare facing edge buckets and
+        mark only genuinely overlapping polygon-side edges (mark_overlapping_edges_for_walls)
+      * cell 1 has mesh, neighbor is NaN or out of range -> wall the whole shared side of cell 1
+      * cell 1 is NaN, neighbor has mesh    -> wall the facing side of the neighbor
+      * cell 1 has mesh but no edge buckets -> inconsistent state, raises ValueError
+
+    :param polygon_intersection_edge_buckets: polygon_intersection_edge_buckets of type ndarray with dtype=object dict[str,list[BorderEdge]]. Should be the ndarray from RasterVariants.
+    :type polygon_intersection_edge_buckets: numpy.ndarray
+    :param elevation_raster: raster of type ndarray with dtype=float64. Should be an ndarray from RasterVariants that tells us if a cell has an elevation value set or not. We need this to differentiate between contained properly and disjoint cell because both cases have no polygon_intersection_edge_buckets set
+    :type elevation_raster: numpy.ndarray
+    :param cell_location: Target cell location in Y,X order
+    :type cell_location: tuple[int, int]
+    :param direction: Direction of neighboring cell in Y,X order
+    :type direction: tuple[int, int]
+    :raises ValueError: if cell 1 has a non-NaN elevation but no edge-bucket dict
+    """
+
+    # Debug: inspect a cell
+    if cell_location[0] == 1 and cell_location[1] == 7:
+        pass
+
+    cell_1_edge_buckets: dict[str, list[BorderEdge]] = polygon_intersection_edge_buckets[cell_location]
+    # Get 2 separate cell "2"s, one in vertical direction, one in horizontal direction
+    cell_2_location_y = cell_location[0]+direction[0]
+    cell_2_location_x = cell_location[1]+direction[1]
+
+    # Bounds checks: a neighbor outside the raster is treated like a NaN neighbor.
+    cell_2_location_y_in_range = cell_2_location_y >= 0 and cell_2_location_y < polygon_intersection_edge_buckets.shape[0]
+    cell_2_location_x_in_range = cell_2_location_x >= 0 and cell_2_location_x < polygon_intersection_edge_buckets.shape[1]
+
+    # check if cell 1 should be any mesh generated later by checking dilated elevation value for not-NaN
+    if not numpy.isnan(elevation_raster[cell_location]):
+        if not isinstance(cell_1_edge_buckets, dict): # non-NaN cells should be intersecting the clipping polygon (partially or enclosed) and have a edge bucket
+            raise ValueError("cell 1 elevation raster value is not NaN but has no edge bucket dict")
+        # If cell 1 has mesh generated, then check if cell 2 is in range and cell 2 will have mesh generated.
+        # --- vertical neighbor (direction[0]) ---
+        if cell_2_location_y_in_range:
+            cell_2_location = (cell_2_location_y,cell_location[1])
+            cell_2_y_edge_buckets = polygon_intersection_edge_buckets[cell_2_location]
+            if not numpy.isnan(elevation_raster[cell_2_location]): # do overlapping edge check if cell 2 dilated elevation is not NaN (assume it has a edge bucket dict if not NaN)
+                # cell 2 should have mesh created later and has buckets to compare
+                # Cell 1's N edges face cell 2's S edges (and vice versa).
+                if direction[0] == -1: #N
+                    mark_overlapping_edges_for_walls(cell_1_edges=cell_1_edge_buckets['N'], cell_2_edges=cell_2_y_edge_buckets['S'])
+                elif direction[0] == 1: #S
+                    mark_overlapping_edges_for_walls(cell_1_edges=cell_1_edge_buckets['S'], cell_2_edges=cell_2_y_edge_buckets['N'])
+                elif direction[0] != 0:
+                    print(f'mark_shared_edges_of_cell_for_walls: unsupported direction of {direction[0]}')
+            else: # cell 2 elevation_raster is NaN. Make walls on cell 1 shared side.
+                if direction[0] == -1: #N
+                    for e in cell_1_edge_buckets['N']:
+                        e.make_wall = True
+                if direction[0] == 1: #S
+                    for e in cell_1_edge_buckets['S']:
+                        e.make_wall = True
+        else:
+            # Direction of cell 2 is out of range. Treat like a NaN neighbor: wall the shared side.
+            if direction[0] == -1: #N
+                for e in cell_1_edge_buckets['N']:
+                    e.make_wall = True
+            if direction[0] == 1: #S
+                for e in cell_1_edge_buckets['S']:
+                    e.make_wall = True
+
+        # --- horizontal neighbor (direction[1]) --- mirrors the vertical logic above.
+        if cell_2_location_x_in_range:
+            cell_2_location = (cell_location[0], cell_2_location_x)
+            cell_2_x_edge_buckets = polygon_intersection_edge_buckets[cell_2_location]
+            if not numpy.isnan(elevation_raster[cell_2_location]): # do overlapping edge check if cell 2 dilated elevation is not NaN (assume it has a edge bucket dict)
+                # cell 2 has buckets to compare
+                if direction[1] == -1: #W
+                    mark_overlapping_edges_for_walls(cell_1_edges=cell_1_edge_buckets['W'], cell_2_edges=cell_2_x_edge_buckets['E'])
+                elif direction[1] == 1: #E
+                    mark_overlapping_edges_for_walls(cell_1_edges=cell_1_edge_buckets['E'], cell_2_edges=cell_2_x_edge_buckets['W'])
+                elif direction[1] != 0:
+                    print(f'mark_shared_edges_of_cell_for_walls: unsupported direction of {direction[1]}')
+            else: # cell 2 elevation_raster is NaN. Make walls on cell 1 shared side.
+                if direction[1] == -1: #W
+                    for e in cell_1_edge_buckets['W']:
+                        e.make_wall = True
+                if direction[1] == 1: #E
+                    for e in cell_1_edge_buckets['E']:
+                        e.make_wall = True
+        else:
+            # Direction of cell 2 is out of range.
+            if direction[1] == -1: #W
+                for e in cell_1_edge_buckets['W']:
+                    e.make_wall = True
+            if direction[1] == 1: #E
+                for e in cell_1_edge_buckets['E']:
+                    e.make_wall = True
+    else:
+        # This case happens when:
+        # If cell 1 elevation_raster is NaN:
+        # e.g. cell 1 is outside the polygon or cell 1 will not have mesh generated for it -> if cell 2 will have mesh generated -> cell 2 should make walls
+        # Note the bucket keys are from cell 2's perspective, so they are mirrored
+        # relative to direction (N direction walls cell 2's S bucket, etc.).
+        if numpy.isnan(elevation_raster[cell_location]):
+            if cell_2_location_y_in_range and not numpy.isnan(elevation_raster[cell_2_location_y,cell_location[1]]) and polygon_intersection_edge_buckets[cell_2_location_y,cell_location[1]]:
+                cell_2_y_edge_buckets = polygon_intersection_edge_buckets[cell_2_location_y,cell_location[1]]
+                if direction[0] == -1: #N
+                    for e in cell_2_y_edge_buckets['S']:
+                        e.make_wall = True
+                if direction[0] == 1: #S
+                    for e in cell_2_y_edge_buckets['N']:
+                        e.make_wall = True
+            if cell_2_location_x_in_range and not numpy.isnan(elevation_raster[cell_location[0],cell_2_location_x]) and polygon_intersection_edge_buckets[cell_location[0],cell_2_location_x]:
+                cell_2_x_edge_buckets = polygon_intersection_edge_buckets[cell_location[0],cell_2_location_x]
+                if direction[1] == -1: #W
+                    for e in cell_2_x_edge_buckets['E']:
+                        e.make_wall = True
+                if direction[1] == 1: #E
+                    for e in cell_2_x_edge_buckets['W']:
+                        e.make_wall = True
+        # If cell 1 elevation_raster is not NaN:
+        # e.g. cell 1 is contained properly in boundary -> no walls
+
+    pass
+
+def mark_shared_edges_for_walls(polygon_intersection_edge_buckets: numpy.ndarray, elevation_raster: numpy.ndarray, direction: tuple[int, int]):
+ """Mark shared edges of all cells in an ndarray and the neighbor cell in the specified direction to have a wall if the edges overlap.
+ """
+ for j in range(0, polygon_intersection_edge_buckets.shape[0]): # Y
+ for i in range(0, polygon_intersection_edge_buckets.shape[1]): # X
+ mark_shared_edges_of_cell_for_walls(polygon_intersection_edge_buckets=polygon_intersection_edge_buckets, elevation_raster=elevation_raster, cell_location=(j,i), direction=direction)
diff --git a/touchterrain/common/polygon_test.py b/touchterrain/common/polygon_test.py
new file mode 100644
index 00000000..9965f881
--- /dev/null
+++ b/touchterrain/common/polygon_test.py
@@ -0,0 +1,265 @@
+# Exploratory scratch script: manual experiments with GeoPackage loading,
+# reprojection, and shapely intersection / line-containment semantics.
+# Not imported by the application; intended to be run or stepped through by hand.
+import geopandas
+from shapely.geometry import Polygon, Point
+import shapely
+
+# Read the GeoPackage into a GeoDataFrame
+gdf = geopandas.read_file('fr.gpkg')
+
+# Initialize an empty list to store Shapely Polygon objects
+shapely_polygons = []
+
+# Iterate through the GeoDataFrame and extract polygon geometries
+for index, row in gdf.iterrows():
+    geometry = row.geometry
+    # Check if the geometry is a Polygon or MultiPolygon
+    if isinstance(geometry, Polygon):
+        shapely_polygons.append(geometry)
+    elif geometry.geom_type == 'MultiPolygon':
+        # If it's a MultiPolygon, iterate through its individual polygons
+        for poly in geometry.geoms:
+            shapely_polygons.append(poly)
+
+# Now, 'shapely_polygons' contains a list of Shapely Polygon objects
+# You can access them and perform further operations
+if shapely_polygons:
+    print(f"Found {len(shapely_polygons)} polygons in the GeoPackage.")
+    print(f"First polygon's area: {shapely_polygons[0].area}")
+else:
+    print("No polygons found in the GeoPackage or the specified layer.")
+
+# NOTE(review): shapely_polygons is a Python list, which has no .exterior attribute —
+# this line raises AttributeError; presumably shapely_polygons[0].exterior was meant.
+exterior_vertices = shapely.points(shapely_polygons.exterior.coords)
+
+shapely.get_num_points(shapely_polygons[0].boundary)
+
+print("\nInterior Rings Vertices:")
+for i, interior_ring in enumerate(shapely_polygons[0].interiors):
+    print(f"  Hole {i+1}:")
+    print(f"    {list(interior_ring.coords)}")
+
+#reproject
+gdf_re = gdf.to_crs('PROJCS["USA_Contiguous_Lambert_Conformal_Conic",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101004,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["latitude_of_origin",39],PARAMETER["central_meridian",-96],PARAMETER["standard_parallel_1",33],PARAMETER["standard_parallel_2",45],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH]]')
+
+shapely_polygons_re = []
+
+# Iterate through the GeoDataFrame and extract polygon geometries
+for index, row in gdf_re.iterrows():
+    geometry = row.geometry
+    # Check if the geometry is a Polygon or MultiPolygon
+    if isinstance(geometry, Polygon):
+        shapely_polygons_re.append(geometry)
+    elif geometry.geom_type == 'MultiPolygon':
+        # If it's a MultiPolygon, iterate through its individual polygons
+        for poly in geometry.geoms:
+            shapely_polygons_re.append(poly)
+
+# Now, 'shapely_polygons_re' contains a list of Shapely Polygon objects
+# You can access them and perform further operations
+if shapely_polygons_re:
+    print(f"Found {len(shapely_polygons_re)} polygons in the GeoPackage.")
+    print(f"First polygon's area: {shapely_polygons_re[0].area}")
+else:
+    print("No polygons found in the GeoPackage or the specified layer.")
+
+# NOTE(review): same list-vs-polygon bug as above — shapely_polygons_re[0].exterior was likely intended.
+exterior_vertices = shapely.points(shapely_polygons_re.exterior.coords)
+
+shapely.get_num_points(shapely_polygons_re[0].boundary)
+
+print("\nInterior Rings Vertices:")
+for i, interior_ring in enumerate(shapely_polygons_re[0].interiors):
+    print(f"  Hole {i+1}:")
+    print(f"    {list(interior_ring.coords)}")
+
+import numpy
+def geoToPrint3DCoordinates(shapelyPolygon, scale, geoXMin, geoYMin):
+    """Shift a geometry by (geoXMin, geoYMin) and divide by scale.
+
+    NOTE(review): the transformed geometry is discarded and the function
+    returns None — a `return shapely.transform(...)` was probably intended.
+    """
+    def transform(x: numpy.ndarray):
+        return (x - [geoXMin, geoYMin]) / scale
+
+    import shapely
+    shapely.transform(shapelyPolygon, transformation=transform)
+
+# NOTE(review): passes a *list* of polygons, but shapely.transform expects a single
+# geometry — this call raises; transform each polygon individually instead.
+geoToPrint3DCoordinates(shapely_polygons_re, 1000, 1000, 1000)
+
+
+from shapely.geometry import Polygon, LineString, Point
+from shapely.ops import split, unary_union
+#poly1 contains poly2
+poly1_coords = [(0, 0), (5, 0), (5, 5), (0, 5), (0, 0)]
+poly1 = Polygon(poly1_coords)
+poly2_coords = [(1, 1), (3, 1), (3, 3), (1, 3), (1, 1)]
+poly2 = Polygon(poly2_coords)
+print(poly1.intersection(poly2))
+
+#poly1 contains part of poly2
+poly1_coords = [(0, 0), (5, 0), (5, 5), (0, 5), (0, 0)]
+poly1 = Polygon(poly1_coords)
+poly2_coords = [(-1, 1), (3, 1), (3, 3), (1, 3), (-1, 1)]
+poly2 = Polygon(poly2_coords)
+print(poly1.intersection(poly2))
+
+#poly1 intersections poly2 to create 2 connected vertices
+poly1_coords = [(0, 0), (5, 0), (5, 5), (0, 5), (0, 0)]
+poly1 = Polygon(poly1_coords)
+poly2_coords = [(-1, -1), (-1, 6), (0, 6), (0, -1), (-1, -1)]
+poly2 = Polygon(poly2_coords)
+print(poly1.intersection(poly2))
+print(poly2.intersection(poly1))
+poly2_coords = [(5, -1), (5, 6), (6, 6), (6, -1), (-5, -1)]
+poly2 = Polygon(poly2_coords)
+print(poly1.intersection(poly2))
+print(poly2.intersection(poly1))
+
+#poly1 intersections poly2 to create 2 closed, disconnected polygons
+poly1_coords = [(0, 0), (5, 0), (5, 5), (0, 5), (0, 0)]
+poly1 = Polygon(poly1_coords)
+poly2_coords = [(-1, 1), (-1, 6), (5, 6), (-0.5, 3), (5, -1), (-1, 1)]
+poly2 = Polygon(poly2_coords)
+print(poly1.intersection(poly2))
+
+#poly1 intersections poly2 to create 2 closed polygons sharing a single vertex
+poly1_coords = [(0, 0), (5, 0), (5, 5), (0, 5), (0, 0)]
+poly1 = Polygon(poly1_coords)
+poly2_coords = [(-1, 1), (-1, 6), (5, 6), (0, 3), (5, -1), (-1, 1)]
+poly2 = Polygon(poly2_coords)
+print(poly1.intersection(poly2))
+
+#poly1 intersections poly2 to create 2 closed polygons sharing 2 connected vertices
+poly1_coords = [(0, 0), (5, 0), (5, 5), (0, 5), (0, 0)]
+poly1 = Polygon(poly1_coords)
+poly2_coords = [(-1, 1), (-1, 6), (5, 6),(0, 4), (0, 3), (5, -1), (-1, 1)]
+poly2 = Polygon(poly2_coords)
+print(poly1.intersection(poly2))
+
+#poly1 intersections poly2 to create 2 closed polygons sharing 4 connected vertices, 2 of those vertices are in a line segment
+poly1_coords = [(0, 0), (5, 0), (5, 5), (0, 5), (0, 0)]
+poly1 = Polygon(poly1_coords)
+poly2_coords = [(0, 1), (0, 6), (5, 6),(0, 4), (0, 3), (5, -1), (0, 1)]
+poly2 = Polygon(poly2_coords)
+print(poly1.intersection(poly2))
+
+#poly1 is contained by poly2 sharing 2 connected vertices
+poly1_coords = [(0, 0), (5, 0), (5, 5), (0, 5), (0, 0)]
+poly1 = Polygon(poly1_coords)
+poly2_coords = [(-1, -1), (-1, 6), (6, 6),(5, 5), (6, 4), (5, 3), (6, -1), (-1,-1)]
+poly2 = Polygon(poly2_coords)
+print(poly1.intersection(poly2))
+
+#poly1 is contained by poly2 sharing 2 connected vertices
+poly1_coords = [(0, 0), (5, 0), (5, 5), (0, 5), (0, 0)]
+poly1 = Polygon(poly1_coords)
+poly2_coords = [(-1, -1), (-1, 6), (6, 6),(5, 5), (6, 4), (4, 3), (6, -1), (-1,-1)]
+poly2 = Polygon(poly2_coords)
+print(poly1.intersection(poly2))
+
+#poly1 intersects a point1 along a vertex or point2 in its interior
+poly1_coords = [(0, 0), (5, 0), (5, 5), (0, 5), (0, 0)]
+poly1 = Polygon(poly1_coords)
+point1 = Point(0,0)
+point2 = Point(1,1)
+print(poly1.intersects(point1)) #true
+print(poly1.intersects(point2)) #true
+
+#line1 is same as line2
+line1_coords= [(0,0), (5,0)]
+line1 = LineString(line1_coords)
+line2_coords= [(0,0), (5,0)]
+line2 = LineString(line2_coords)
+line2rev_coords= [(5,0), (0,0)]
+line2rev = LineString(line2rev_coords)
+line2.contains(line1) #true
+line2rev.contains(line1) #true
+line2.contains_properly(line1) #false
+line1.overlaps(line2) #false
+line2.overlaps(line1) #false
+line2rev.overlaps(line1) #false
+line1.equals(line2) #true
+line1.equals(line2rev) #true
+# equals_exact uses its default tolerance of 0.0 here; it is order-sensitive,
+# hence True for the same direction and False for the reversed copy.
+line1.equals_exact(line2) #true
+line1.equals_exact(line2rev) #false
+
+# NOTE(review): these Z-coordinate variants are defined but never turned into
+# LineStrings or compared — leftover experiment data.
+line1_coords= [(0,0,1), (5,0,2)]
+line2rev_coords= [(5,0,0), (0,0,-1)]
+
+
+#line1 is contained by line2 but not contains_properly
+line1_coords= [(0,0), (5,0)]
+line1 = LineString(line1_coords)
+line2_coords= [(0,0), (10,0)]
+line2 = LineString(line2_coords)
+line2.contains(line1) #true
+line2.contains_properly(line1) #false
+line1.overlaps(line2) #false
+line2.overlaps(line1) #false
+
+# get split sub edges
+merged_lines = unary_union([line1, line2])
+for segment in merged_lines.geoms:
+    print(segment)
+# result = split(line2, line1)
+# result = split(line1, line2)
+# for segment in result.geoms:
+#     print(segment)
+
+#line1 is contains_properly by line2
+line1_coords= [(1,0), (5,0)]
+line1 = LineString(line1_coords)
+line2_coords= [(0,0), (10,0)]
+line2 = LineString(line2_coords)
+line2.contains(line1) #true
+line2.contains_properly(line1) #true
+line2.overlaps(line1) #false
+
+#line1 and line2 share 1 point
+line1_coords= [(1,0), (5,0)]
+line1 = LineString(line1_coords)
+line2_coords= [(5,0), (10,0)]
+line2 = LineString(line2_coords)
+line2.contains(line1) #false
+line2.contains_properly(line1) #false
+line2.overlaps(line1) #false
+
+#line1 and line2 each overlap some of each other but not all
+line1_coords= [(1,0), (6,0)]
+line1 = LineString(line1_coords)
+line2_coords= [(5,0), (10,0)]
+line2 = LineString(line2_coords)
+line2.contains(line1) #false
+line2.contains_properly(line1) #false
+line1.overlaps(line2) #true
+line2.overlaps(line1) #true
+
+
+from shapely.plotting import plot_polygon, plot_line, plot_points
+import shapely.geometry as sg
+import shapely.ops as so
+import matplotlib.pyplot as plt
+
+def plot_geom(geom, axs):
+    # Dispatch on geom_type prefix: polygons red-dotted, lines yellow-dashed,
+    # everything else (points) brown.
+    if geom.geom_type.startswith('Polygon'):
+        plot_polygon(geom, ax=axs, add_points=False, color='red', linestyle=':')
+    elif geom.geom_type.startswith('Line'):
+        plot_line(geom, ax=axs, add_points=True, color='yellow', linestyle='--')
+    else:
+        plot_points(geom, ax=axs, color='brown')
+
+
+# Plot the two most recently defined polygons and their intersection.
+polyStart = [poly1, poly2]
+fig, axs = plt.subplots()
+axs.set_aspect('equal', 'datalim')
+
+for geom in polyStart:
+    plot_polygon(geom, ax=axs, add_points=False, color='blue', linestyle='-.')
+
+polyEnd = poly2.intersection(poly1)
+if polyEnd.geom_type.startswith('Multi') or polyEnd.geom_type.startswith('GeometryCollection'):
+    print(polyEnd)
+    for sub_geom in polyEnd.geoms:
+        print(sub_geom)
+        plot_geom(sub_geom, axs)
+else:
+    print(polyEnd)
+    plot_geom(polyEnd, axs)
+
+plt.show()
+
+
diff --git a/touchterrain/common/shapely_plot.py b/touchterrain/common/shapely_plot.py
new file mode 100644
index 00000000..3f2a7372
--- /dev/null
+++ b/touchterrain/common/shapely_plot.py
@@ -0,0 +1,73 @@
+import shapely
+from shapely.plotting import plot_polygon, plot_line, plot_points
+import matplotlib.cm as cm
+import matplotlib.pyplot as plt
+import matplotlib.typing as mt
+
+from touchterrain.common.BorderEdge import BorderEdge
+
+def plot_shapely_poly_or_line(geom: shapely.Geometry, ax):
+ if geom.geom_type.startswith('Polygon'):
+ plot_polygon(geom, ax=ax, add_points=False, color='red', linestyle=':')
+ elif geom.geom_type.startswith('Line'):
+ plot_line(geom, ax=ax, add_points=True, color='yellow', linestyle='--')
+ else:
+ plot_points(geom, ax=ax, color='brown')
+
+def plot_shapely_geom(geom: shapely.Geometry, ax, color: mt.ColorType = 'red', linestyle: str = '-', **kwargs):
+ if geom.geom_type.startswith('Polygon'):
+ plot_polygon(geom, ax=ax, add_points=False, color=color, linestyle=linestyle, **kwargs)
+ elif geom.geom_type.startswith('Line'):
+ plot_line(geom, ax=ax, add_points=True, color=color, linestyle=linestyle, **kwargs)
+ else:
+ plot_points(geom, ax=ax, color='brown')
+
+def plot_intersection_of_shapely_polygons(polys: list[shapely.Polygon]):
+ "Plot 2 polygons and their intersection geometries."
+
+ fig, axs = plt.subplots()
+ axs.set_aspect('equal', 'datalim')
+
+ for geom in polys:
+ plot_polygon(geom, ax=axs, add_points=False, color='blue', linestyle='-.')
+
+ polyEnd = polys[0].intersection(polys[1])
+ if polyEnd.geom_type.startswith('Multi') or polyEnd.geom_type.startswith('GeometryCollection'):
+ print(polyEnd)
+ for sub_geom in polyEnd.geoms:
+ print(sub_geom)
+ plot_shapely_poly_or_line(sub_geom, axs)
+ else:
+ print(polyEnd)
+ plot_shapely_poly_or_line(polyEnd, axs)
+
+ plt.show()
+
+def plot_shapely_geometries_colormap(basePolys: list[shapely.Polygon] = [], intersectionPolys: list[list[shapely.Geometry]] = [], edgeBuckets: list[list[BorderEdge]] = []):
+ "Plot N polygons and lines in a different color each time."
+
+ fig, axs = plt.subplots()
+ axs.set_aspect('equal', 'datalim')
+
+ # Choose a colormap (e.g., 'viridis', 'plasma', 'tab10')
+ cmap = cm.get_cmap('gist_rainbow', len(basePolys)+len(intersectionPolys)+len(edgeBuckets))
+
+ # -- dashed for base poly
+ for i in range(0,len(basePolys)):
+ plot_polygon(basePolys[i], ax=axs, add_points=False, color=cmap(i), linestyle='--', linewidth=2, alpha=0.5)
+
+ # -. dash dot for intersections
+ for i in range(0,len(intersectionPolys)):
+ for ip in intersectionPolys[i]:
+ if ip.geom_type.startswith('Multi') or ip.geom_type.startswith('GeometryCollection'):
+ for sub_geom in ip.geoms:
+ plot_shapely_geom(sub_geom, ax=axs, color=cmap(len(intersectionPolys)+i), linestyle='-.')
+ else:
+ plot_shapely_geom(ip, ax=axs, color=cmap(len(intersectionPolys)+i), linestyle='-.')
+
+ # solid or dot for wall/no wall edges
+ for i in range(0, len(edgeBuckets)):
+ for be in edgeBuckets[i]:
+ plot_shapely_geom(be.geometry, ax=axs, color=cmap(len(basePolys)+len(intersectionPolys)+i), linestyle='-' if be.make_wall else ':', linewidth=(3 if be.make_wall else 1), alpha=(0.8 if be.make_wall else .8))
+
+ plt.show()
\ No newline at end of file
diff --git a/touchterrain/common/shapely_polygon_utils.py b/touchterrain/common/shapely_polygon_utils.py
new file mode 100644
index 00000000..79afb772
--- /dev/null
+++ b/touchterrain/common/shapely_polygon_utils.py
@@ -0,0 +1,192 @@
+import shapely
+from typing import List, Dict, Union
+from touchterrain.common.Vertex import vertex
+
+import numpy as np
+
+from shapely.ops import orient
+
+
def polygons_equal_3d(a: shapely.Polygon, b: shapely.Polygon, tol=1e-9) -> bool:
    """Return True when *a* and *b* have the same 2D footprint AND the same
    per-vertex Z values, allowing each ring to start at a different vertex.

    Assumes every ring vertex of both polygons carries a Z coordinate
    (a 2D ring raises IndexError, same as the original implementation).

    :param a: first polygon (3D coordinates)
    :param b: second polygon (3D coordinates)
    :param tol: absolute per-coordinate tolerance for the comparison
    """
    # Same 2D footprint (shapely's equals() ignores Z)
    if not a.equals(b):
        return False

    # Normalize winding so clockwise/counterclockwise match
    a, b = orient(a, sign=1.0), orient(b, sign=1.0)

    def ring_coords(poly):
        # BUGFIX: drop the duplicated closing vertex of each ring before rotation.
        # With the closing vertex kept, rotating a ring to a different start vertex
        # left the old closing point in the middle of the array, so identical rings
        # with different start points compared unequal.
        return [np.array(poly.exterior.coords)[:-1],
                *[np.array(r.coords)[:-1] for r in poly.interiors]]

    rings_a, rings_b = ring_coords(a), ring_coords(b)
    if len(rings_a) != len(rings_b):
        return False

    for ra, rb in zip(rings_a, rings_b):
        if ra.shape != rb.shape:
            return False
        # Rotate rb so its nearest vertex (in 2D) lines up with ra's first vertex
        diffs = np.linalg.norm(rb[:, :2] - ra[0, :2], axis=1)
        start = int(np.argmin(diffs))
        rb_rot = np.concatenate([rb[start:], rb[:start]], axis=0)
        if not (np.allclose(ra[:, :2], rb_rot[:, :2], atol=tol) and
                np.allclose(ra[:, 2], rb_rot[:, 2], atol=tol)):
            return False
    return True
+
def get_polygon_coordinates_as_tuples(polygon: shapely.Polygon) -> list[tuple[float,...]]:
    """
    Return every vertex of *polygon* as a list of coordinate tuples:
    the exterior ring first, then each interior ring (hole) in order.

    Args:
        polygon: A shapely.geometry.Polygon object.

    Returns:
        A list of tuples, one per vertex of the exterior and interior rings.

    Raises:
        TypeError: if *polygon* is not a shapely Polygon.
    """
    if not isinstance(polygon, shapely.Polygon):
        raise TypeError("Input must be a shapely.geometry.Polygon object.")

    # Exterior ring first, then the holes, preserving their order.
    rings = [polygon.exterior, *polygon.interiors]
    return [tuple(coord) for ring in rings for coord in ring.coords]
+
def polygon_to_list_of_vertex(polygon: shapely.Polygon) -> list[vertex]:
    """Convert the exterior ring of a hole-free 3D polygon into a list of vertex objects.

    Raises:
        TypeError: if *polygon* is not a shapely Polygon.
        ValueError: if the polygon has interior rings (holes), which are unsupported.
    """
    if not isinstance(polygon, shapely.Polygon):
        raise TypeError("Input must be a shapely.geometry.Polygon object.")

    if len(polygon.interiors) > 0:
        raise ValueError(f"polygon_to_list_of_vertex found interior structure but it only supports exterior structures.")

    # Each exterior coordinate is expected to be a 3-tuple (x, y, z).
    return [vertex(x=px, y=py, z=pz) for px, py, pz in polygon.exterior.coords]
+
def create_polygon_from_modified_coords(original_polygon: shapely.Polygon, all_coords_as_tuples: list) -> shapely.Polygon:
    """
    Rebuild a Polygon from a flat list of (possibly modified) coordinates,
    using *original_polygon* only to recover the ring structure (how many
    vertices belong to the exterior and to each hole).

    Args:
        original_polygon: The original shapely.geometry.Polygon object.
        all_coords_as_tuples: Flat list of coordinate tuples for the exterior
            followed by each interior ring, in extraction order
            (see get_polygon_coordinates_as_tuples).

    Returns:
        A new shapely.geometry.Polygon object.

    Raises:
        TypeError: if *original_polygon* is not a shapely Polygon.
    """
    if not isinstance(original_polygon, shapely.Polygon):
        raise TypeError("Input must be a shapely.geometry.Polygon object.")

    # The exterior ring occupies the first len(exterior.coords) entries.
    cursor = len(original_polygon.exterior.coords)
    shell = all_coords_as_tuples[:cursor]

    # Consume one slice per original hole, advancing the cursor each time.
    holes = []
    for ring in original_polygon.interiors:
        ring_len = len(ring.coords)
        holes.append(all_coords_as_tuples[cursor:cursor + ring_len])
        cursor += ring_len

    # Polygon(exterior, [interior_1, interior_2, ...])
    return shapely.Polygon(shell, holes)
+
def get_polygon_points(polygon: shapely.geometry.Polygon) -> Dict[str, List[Union[shapely.geometry.Point, List[shapely.geometry.Point]]]]:
    """
    Return the vertices of *polygon* as shapely Points, keyed by ring kind:
    'exterior' -> flat list of Points, 'interior' -> one list of Points per hole.
    """
    if not isinstance(polygon, shapely.geometry.Polygon):
        raise TypeError("Input must be a shapely.geometry.Polygon.")

    def _ring_points(ring):
        # Unpacks (x, y) pairs; like the original, this assumes 2D ring coordinates.
        return [shapely.geometry.Point(x, y) for x, y in ring.coords]

    return {
        'exterior': _ring_points(polygon.exterior),
        'interior': [_ring_points(hole) for hole in polygon.interiors],
    }
+
+# ---
+
def recreate_polygon_from_points(points_data: Dict[str, List[Union[shapely.geometry.Point, List[shapely.geometry.Point]]]]) -> shapely.geometry.Polygon:
    """
    Rebuild a shapely Polygon from the {'exterior': [...], 'interior': [[...], ...]}
    structure produced by get_polygon_points.

    Raises:
        ValueError: if 'exterior' is missing/invalid, or an interior entry is not a list.
    """
    # Exterior ring is mandatory; anything that is not a list of Points is rejected.
    try:
        shell = [(pt.x, pt.y) for pt in points_data['exterior']]
    except (KeyError, AttributeError):
        raise ValueError("Input data must contain a valid 'exterior' key with a list of Point objects.")

    # Holes are optional; a missing or empty 'interior' entry yields no holes.
    holes = []
    for hole_points in points_data.get('interior') or []:
        if not isinstance(hole_points, list):
            raise ValueError("Interior data must be a list of lists of Point objects.")
        holes.append([(pt.x, pt.y) for pt in hole_points])

    return shapely.geometry.Polygon(shell, holes)
diff --git a/touchterrain/common/shapely_utils.py b/touchterrain/common/shapely_utils.py
new file mode 100644
index 00000000..89643b11
--- /dev/null
+++ b/touchterrain/common/shapely_utils.py
@@ -0,0 +1,84 @@
+import shapely
+from touchterrain.common.BorderEdge import BorderEdge
+from touchterrain.common.Quad import quad
+
+from touchterrain.common.shapely_polygon_utils import get_polygon_coordinates_as_tuples, create_polygon_from_modified_coords
+from touchterrain.common.interpolate_Z import is_point_in_triangle, interpolate_Z_on_3_point_plane
+
def linestring_to_segments(linestring_obj: shapely.LineString) -> list[shapely.LineString]:
    """
    Split a LineString into its individual 2-point segments.

    :param linestring_obj (shapely.geometry.LineString): The input LineString.

    :returns list: One LineString per consecutive coordinate pair.
    """
    pts = list(linestring_obj.coords)
    # Pair each coordinate with its successor to form the segments.
    return [shapely.LineString(pair) for pair in zip(pts, pts[1:])]
+
def flatten_geometries(geometries: list[shapely.Geometry], to_single_lines: bool = False) -> list[shapely.Geometry]:
    """Recursively flatten a list of (multi) geometries into non-multi geometries.
    Empty geometries and Points are dropped.

    :param geometries: List of shapely.Geometries to flatten
    :param to_single_lines: If True, polygons are reduced to their boundary and
        all lines are split into 2-point LineString segments
    """
    flat_list = []
    for geom in geometries:
        if geom.geom_type.startswith('Multi') or isinstance(geom, shapely.GeometryCollection):
            # For collections, iterate over the individual parts.
            # BUGFIX: this branch is now exclusive — previously control fell through
            # and (when to_single_lines was False) the raw Multi*/GeometryCollection
            # object itself was also appended to the result.
            flat_list.extend(flatten_geometries(geom.geoms, to_single_lines=to_single_lines))
        elif geom.is_empty or isinstance(geom, shapely.Point):
            continue
        elif to_single_lines:
            if isinstance(geom, shapely.Polygon):
                flat_list.extend(flatten_geometries([geom.boundary], to_single_lines=to_single_lines))
            elif isinstance(geom, shapely.LineString):
                flat_list.extend(linestring_to_segments(geom))
        else:
            # Simple, non-empty, non-point geometry: keep as-is.
            flat_list.append(geom)

    return flat_list
+
def flatten_geometries_borderEdge(geometries: list[shapely.Geometry], polygon_parent: bool = False) -> list[BorderEdge]:
    """Recursively flatten a list of multi geometries into 2-point line segments wrapped as BorderEdge.

    :param geometries: List of shapely.Geometries to flatten
    :param polygon_parent: If the geometries are part of a polygon
    """
    edges: list[BorderEdge] = []
    for geom in geometries:
        if geom.geom_type.startswith('Multi') or isinstance(geom, shapely.GeometryCollection):
            # Collections: recurse into each member.
            edges.extend(flatten_geometries_borderEdge(geom.geoms, polygon_parent=polygon_parent))
        elif geom.is_empty or isinstance(geom, shapely.Point):
            continue
        elif isinstance(geom, shapely.Polygon):
            # A polygon contributes its boundary; mark descendants as polygon lines.
            edges.extend(flatten_geometries_borderEdge([geom.boundary], polygon_parent=True))
        elif isinstance(geom, shapely.LineString):
            edges.extend(BorderEdge(geometry=seg, polygon_line=polygon_parent)
                         for seg in linestring_to_segments(geom))

    return edges
+
def sort_line_segment_based_contains(line_segment: BorderEdge, north: shapely.LineString, west: shapely.LineString, south: shapely.LineString, east: shapely.LineString) -> tuple[str, bool]:
    """Classify a border edge by which cardinal boundary line contains it.

    :param line_segment: Border Edge with a LineString to check against the N W S E LineStrings.
    :return: The bucket key ('N', 'W', 'S', 'E' or 'other') and whether the
        segment lies on a cardinal edge.
    :rtype: (str, bool)
    """
    # Check the cardinal edges in the fixed priority order N, W, S, E.
    for key, edge in (('N', north), ('W', west), ('S', south), ('E', east)):
        if edge.contains(line_segment.geometry):
            return (key, True)

    return ('other', False)
\ No newline at end of file
diff --git a/touchterrain/common/tile_info.py b/touchterrain/common/tile_info.py
new file mode 100644
index 00000000..04115023
--- /dev/null
+++ b/touchterrain/common/tile_info.py
@@ -0,0 +1,72 @@
+from touchterrain.common.user_config import TouchTerrainConfig
+
class TouchTerrainTileInfo:
    """
    Tile info
    Config and other calculated values
    """

    config: TouchTerrainConfig
    "TouchTerrainConfig that the user defined"

    """Set at initialization
    """

    bottom_relief_mm = 1.0
    "thickness of the bottom relief image (float), must be less than base_thickness"
    crs: str = "unprojected"
    "coordinate reference system, can be EPSG code or UTM zone or any projection"
    folder_name: str
    "folder/zip file name for all tiles"
    full_raster_width = -1
    "in pixels"
    full_raster_height = -1
    "in pixels"
    geo_transform = None
    "GeoTransform of geotiff"
    pixel_mm : float
    "lateral (x/y) size of a 3D printed 'pixel' in mm"
    min_bot_elev : float
    "Minimum elevation of the processed bottom raster. Needed for multi-tile models"
    scale : float
    "horizontal scale number, 1000 means 1:1000 => 1m in model = 1000m in reality"
    temp_file: str | None = None
    # NOTE(review): presumably the path of a temp file backing rasters too big for
    # memory (see config.max_cells_for_memory_only) — confirm against the pipeline.
    tile_no_x = -1
    "current(!) tile number along x"
    tile_no_y = -1
    "current(!) tile number along y"
    tile_width: float
    "in mm"
    tile_height: float
    "in mm"
    #user_offset: float # Removed because we can just minus the user given or calculated min_elev instead of first subtracting min elevation of the raster and adding it back again minus the min_elev and calling it user_offset
    # (user_offset was: offset between actual data min_elev and user given min_elev)

    """Set later"""

    file_size: float
    "file size in Mb"


    """Set in grid_tesselate"""

    max_elev: float
    "max elevation of processed top raster"
    max_bot_elev: float
    "max elevation of processed bot raster"

    have_nan: bool
    "processed top raster.dilated has NaN"
    have_bot_nan: bool
    "processed bottom raster has NaN"

    # corner coordinates (may later be needed for 2 bottom triangles)
    W: float
    E: float
    N: float
    S: float



    def __init__(self, config: TouchTerrainConfig):
        "Store the user-defined config; all other fields are filled in later by the pipeline."
        self.config = config
+
diff --git a/touchterrain/common/user_config.py b/touchterrain/common/user_config.py
new file mode 100644
index 00000000..aac797bc
--- /dev/null
+++ b/touchterrain/common/user_config.py
@@ -0,0 +1,176 @@
+class TouchTerrainConfig:
+ """
+ Raster bounds
+ """
+
+ trlat = None
+ "lat of top right corner"
+ trlon = None
+ "lon of top right corner"
+ bllat = None
+ "lat of bottom left corner"
+ bllon = None
+ "lon of bottom left corner"
+ clean_diags = False
+ "clean 2x2 diagonals. Not needed if edge clipping polygon is manifold."
+ polygon = None
+ "optional geoJSON polygon"
+ poly_file = None
+ "path to a local kml file"
+ polyURL = None
+ "URL to a publicly readable(!) kml file on Google Drive"
+
+ """
+ Raster input
+ All DEMs must exactly match the sizes and cell resolution of importedDEM
+ """
+
+ importedDEM = None
+ "None (means: get the DEM from GEE) or local file name with (top) DEM to be used instead"
+ importedDEM_interp = None
+ "Optional raster file for interpolating at edges"
+ top_elevation_hint = None
+ "elevation raster for the future top of the model that would be used for a future difference mesh. Used for Normal mode where Difference Mesh will be created in the future with the same top raster."
+ bottom_elevation = None
+ "elevation raster for the bottom of the model."
+ projection = None
+ "EPSG number (as int) of projection to be used. Default (None) use the closest UTM zone"
+
+ """
+ Elevation adjustment
+ """
+
+ basethick = 2
+ "thickness (in mm) of printed base"
+ bottom_floor_elev: None|float = None
+ "Set bottom raster to an elevation in locations where bottom is NaN but top raster is not NaN. Defaults to min_elev-1. If set to less than min_elev, difference mesh at that point will go thru base."
+ fill_holes = None
+ "e.g. [10, 7] Specify number of interations to find a neighbor threshold to fill holes. -1 iterations will continue iterations until no more holes are found. Defaults to 7 neighbors in a 3x3 footprint with elevation > 0 to fill a hole with the average of the footprint."
+ ignore_leq = None
+ "ignore elevation values <= this value, good for removing offshore data. This filter is applied to the original DEM that is read in so further processing may set the height lower."
+ lower_leq = None
+ "[threshold, offset] if elevation is lower than threhold, lower it by offset mm. Good for adding emphasis to coastlines. Unaffected by z_scale."
+ min_elev: None|float = None
+ "None means: will be calculated from actual elevation later. min_elev defines the elevation that will be at base_thickness"
+ offset_masks_lower = None
+ "e.g. [[filename, offset], [filename2, offset2],...] Masked regions (pixel values > 0) in the file will be lowered by offset(mm) * pixel value in the final model."
+ top_thickness = None
+ "thickness of the top of the model, i.e. top - thickness = bottom. Must exactly match the sizes and cell resolution of importedDEM"
+ zscale = 1.0
+ "elevation (vertical scaling)"
+
+ """
+ Mesh generation
+ """
+
+ bottom_thru_base = False
+ "if mesh should drop thru to base"
+ CPU_cores_to_use = 0
+ "0 means all cores, None (null in JSON!) => don't use multiprocessing"
+ dirty_triangles = False
+ "allow degenerate triangles for difference mesh. Should only be used for Difference Mesh mode."
+ edge_clipping_polygon: None | str = None
+ "GPKG format file containing a single layer with polygon for edge clipping. The output mesh will be clipped along the polygon at sub-pixel resolution."
+ max_cells_for_memory_only = 500 * 500 * 4
+ "if total number of cells is bigger, use temp_file instead using memory only"
+ no_normals = True
+ "True -> all normals are 0,0,0, which speeds up processing. Most viewers will calculate normals themselves anyway"
+ split_rotation: int = 0
+ """Should quad triangulation rotate the splitting edge based on the slope of the created edge?
+ None, 0 -> NW>SW edges
+ 1 -> Rotate for less steep along split edges > Steeper faces along the split.
+ 2 -> Rotate for more steep along split edges > Less steep faces along the split.
+ """
+ smooth_borders = True
+ "should borders be optimized (smoothed) by removing triangles?"
+ temp_folder = "tmp"
+ "the folder to put the temp files and the final zip file into"
+
+ """
+ Mesh output
+ """
+
+ printres = 1.0
+ "resolution (horizontal) of 3D printer (= size of one pixel) in mm"
+ fileformat = "STLb"
+ """format of 3D model files:
+ "obj" = wavefront obj (ascii)
+ "STLa" = ascii STL
+ "STLb" = binary STL
+ "GeoTiff" = DEM raster only, no 3D geometry
+ """
+ zip_file_name: None | str = "terrain"
+ "name of zipfile containing the tiles (st/obj) and helper files. If None, zip_file_name will use the config filename without the extension."
+
+ """
+ Tiling
+ """
+
+ ntilesx = 1
+ "number of tiles in x"
+ ntilesy = 1
+ "number of tiles in y"
+ tile_centered = False
+ "True-> all tiles are centered around 0/0, False, all tiles 'fit together'"
+ tilewidth = 100
+ "width of each tile in mm (<- !!!!!), tile height is calculated automatically"
+ tilewidth_scale = None
+ "divdes m width of selection box by this to get tilewidth (supersedes tilewidth setting)"
+ tileScale = None
+ "Optional tile scale that takes precedence over tilewidth"
+
+ """
+ GPX Track
+ """
+
+ importedGPX = None
+ "None or List of GPX file paths that are to be plotted on the model"
+ gpxPathHeight = 25
+ "Currently we plot the GPX path by simply adjusting the raster elevation at the specified lat/lon, therefore this is in meters. Negative numbers are ok and put a dent in the model"
+ gpxPixelsBetweenPoints = 10
+ "GPX Files can have a lot of points. This argument controls how many pixel distance there should be between points, effectively causing fewing lines to be drawn. A higher number will create more space between lines drawn on the model and can have the effect of making the paths look a bit cleaner at the expense of less precision"
+ gpxPathThickness = 1
+ "Stack parallel lines on either side of primary line to create thickness. A setting of 1 probably looks the best"
+
+ """
+ Miscellaneous until sorted
+ """
+
+ # these are the args that could be given manually via the web UI. Is there a limit to the options allowed from the manual options input on web?
+ no_bottom = False
+ "don't create any bottom triangles. The STL file is not watertight but should still print fine with most slicers (e.g. Cura) and is much smaller"
+ #rot_degs = None
+ "unused"
+ bottom_image = None
+ "1 band greyscale image to use as bottom relief raster, same for _each_ tile! see make_buttom_raster)"
+ DEM_name: None | str = 'USGS/3DEP/10m'
+ "name of DEM source used in Google Earth Engine. for all valid sources, see DEM_sources in TouchTerrainEarthEngine.py. Also used if specifying a custom mesh and zip and extracted folder name."
+ kd3_render = False
+ "if True will create a html file containing the model as a k3d object."
+ map_img_filename = None
+ "image with a map of the area"
+ only = None
+ "2-list with tile index starting at 1 (e.g. [1,2]), which is the only tile to be processed"
+ original_query_string = None
+ "the query string from the app, including map info. Put into log only. Good for making a URL that encodes the app view"
+ unprojected = False
+ "don't apply UTM projection, can only work when exporting a Geotiff as the mesh export needs x/y in meters"
+ use_geo_coords = None
+ "None, centered, UTM. not-None forces units to be in meters, centered will put 0/0 at model center for all tiles. Not-None will interpret basethickness to be in multiples of 10 meters (0.5 mm => 5 m). create STL coords in UTM: None, \"centered\" or \"UTM\""
+
+ """
+ Runtime only values
+ """
+
+ config_path: None | str = None
+ "The path of the Touch Terrain config file. Set this during runtime. If DEM_name is None or default value, use config filename for default zip and mesh filenames and unzipped folder name."
+
+ def mergeDict(self, dict: dict):
+ "Overwrite the config values with values from a dict. All values from the dict are added to the config including new values."
+ for k in list(dict.keys()):
+ try:
+ getattr(self, k)
+ except AttributeError as e:
+ print(e)
+ print(f"New config key {k} in user dict but not in default config. Adding it to config.")
+ setattr(self, k, dict[k])
\ No newline at end of file
diff --git a/touchterrain/common/utils.py b/touchterrain/common/utils.py
index 7ddbc8fe..91d75db4 100644
--- a/touchterrain/common/utils.py
+++ b/touchterrain/common/utils.py
@@ -1,29 +1,36 @@
'''Utilities for touchterrain'''
-import numpy
-import imageio
-import scipy.stats as stats
-from scipy import ndimage
-from scipy.ndimage import binary_dilation, generic_filter
import os.path
-import k3d
import random
-from glob import glob
import zipfile
+
+from glob import glob
+
+import imageio
+import k3d
import matplotlib.pyplot as plt
import matplotlib as mpl
import matplotlib.colors as mcolors
+import numpy
+import scipy.stats as stats
+
+from scipy import ndimage
+from scipy.ndimage import binary_dilation, generic_filter
+
from matplotlib.colors import ListedColormap
-np = numpy
from touchterrain.common.calculate_ticks import calculate_ticks # calculate nice ticks for elevation visualization
+
+np = numpy
+
+
# Utility to save tile as binary png image
def save_tile_as_image(tile, name):
    """Write *name*.png as a binary mask of *tile*: 255 where data exists, 0 where NaN."""
    mask = numpy.where(numpy.isnan(tile), 0, 255).astype(numpy.uint8)
    imageio.imsave(name + '.png', mask)
-def clean_up_diags(ras):
+def clean_up_diags(ras: numpy.ndarray):
'''clean up diagonal cells as these lead to non-manifold vertices where they meet
These are defined as either 0 1 or 1 0 where 0 == NaN and 1 == non-NaN)
1 0 0 1
@@ -43,7 +50,7 @@ def clean_up_diags(ras):
[0, 1, 1, 0,]])
'''
- # If there are NaNs in the raster there cannot by any diagonal patterns, so we're done!
+    # If there are no NaNs in the raster there cannot be any diagonal patterns, so we're done!
if not numpy.any(numpy.isnan(ras)):
return ras
@@ -184,13 +191,114 @@ def checkForAndFillHole(values):
return raster
# NOTE(review): these module-level globals appear unused — the coordinate helper
# functions below assign same-named *local* variables and never read or write
# these. Candidates for removal once it is confirmed nothing else imports them.
geoXMin = 0
geoXPixelSize = 0
geoYMin = 0
geoYPixelSize = 0
+
def arrayCellCoordToGeoCoord(array_coord_2D: tuple[float, float], geo_transform: tuple) -> tuple[float, float]:
    """Transform a tuple of array based cell coordinates in X,Y order to geo coordinates. Returns the new tuple.

    :param array_coord_2D: Tuple(X,Y) of array based cell coordinates 2D.
    :type array_coord_2D: tuple[float, float]
    :param geo_transform: GDAL geotransform information in a tuple of order [tl X, pixel width, X-skew, tl Y, Y-skew, pixel height]
    :type geo_transform: tuple
    :return: Tuple of geo coordinates
    :rtype: tuple[float, float]
    """
    origin_x, pixel_w = geo_transform[0], geo_transform[1]
    origin_y, pixel_h = geo_transform[3], geo_transform[5]

    col, row = array_coord_2D
    # +0.5 maps the cell index to its center (pixel_h is negative for north-up rasters).
    return (origin_x + (col + 0.5) * pixel_w,
            origin_y + (row + 0.5) * pixel_h)
+
def arrayCellCoordToPrint2DCoord(array_coord_2D: tuple[float, float], cell_size: float, tile_y_shape: int) -> tuple[float, float]:
    """Transform a tuple of 0-based array cell coordinates in X,Y order to Print2D coordinates. Returns the new tuple.

    :param array_coord_2D: Tuple(X,Y) of array based cell coordinates 2D.
    :type array_coord_2D: tuple[float, float]
    :param cell_size: Cell size in Print2D coordinates (mm), likely print3D_resolution_mm
    :type cell_size: float
    :param tile_y_shape: Tile Y height in number of array indices, likely npim.shape[0]
    :type tile_y_shape: int
    :return: Tuple of Print2D coordinates in X,Y order
    :rtype: tuple[float, float]
    """
    # The returned float may not be exactly representable (e.g. "0.1" or "8.7"),
    # but struct.pack("f", ...) on STL export rounds equivalent values identically.
    col, row = array_coord_2D
    # X grows with the column; Y is flipped because array row 0 is the top of the tile.
    print_x = (col + 0.5) * cell_size
    print_y = (tile_y_shape - row - 0.5) * cell_size
    return (print_x, print_y)
+
def arrayCellCoordToQuadPrint2DCoords(array_coord_2D: tuple[float, float], cell_size: float, tile_y_shape: int) -> list[tuple[float, float]]:
    """Return the Print2D coordinates of the four corners of the cell at
    *array_coord_2D*, in NW SW SE NE order.

    :param array_coord_2D: Tuple of 0-based array cell coordinates 2D.
    :type array_coord_2D: tuple[float, float]
    :param cell_size: Cell size in Print3D dimension (mm)
    :type cell_size: float
    :param tile_y_shape: Tile Y height in number of array indices
    :type tile_y_shape: int
    :return: quad corner locations in Print2D coordinates in NW SW SE NE order
    :rtype: list[tuple[float, float]]
    """
    cx, cy = array_coord_2D
    # Half-cell offsets from the cell center, listed NW, SW, SE, NE
    # (array Y grows southward, so -0.5 in Y is the north side).
    corner_offsets = ((-0.5, -0.5), (-0.5, +0.5), (+0.5, +0.5), (+0.5, -0.5))
    return [arrayCellCoordToPrint2DCoord(array_coord_2D=(cx + dx, cy + dy),
                                         cell_size=cell_size,
                                         tile_y_shape=tile_y_shape)
            for dx, dy in corner_offsets]
+
import shapely  # NOTE(review): mid-file import; should move into the module's top-level import block
def geoCoordToPrint2DCoord(geoCoord2D: shapely.Geometry | tuple[float, float] , scale: float, geoXMin: float, geoYMin: float) -> shapely.Geometry | tuple[float, float]:
    """Transform a geometry or tuple from geo coordinates to print2D coordinates. Returns the new geometry or tuple.

    Array coordinates have min Y at top

    Projected geo coordinates have origin at lower left.
    Print2D coordinates have origin at lower left.

    :param geoCoord2D: Shapely geometry type object to transform. Coordinates are in projected geo coordinates. The polygon from clipping boundary file CRS should be reprojected to the projected CRS of the raster.
    :type geoCoord2D: shapely.Geometry
    :param scale: raster to print 3D scale (use tileScale)
    :type scale: float
    :param geoXMin: geocoordinate X min of the raster
    :type geoXMin: float
    :param geoYMin: geo coordinate Y min of the raster
    :type geoYMin: float
    """

    # A plain tuple is wrapped as a Point so the same shapely transform applies,
    # and unwrapped back to a tuple on return.
    returnAsTuple = False
    if isinstance(geoCoord2D, tuple):
        returnAsTuple = True
        geoCoord2D = shapely.Point(geoCoord2D)

    scale /= 1000 # print2D coordinates are in mm but the scale is for real world meters to print meters.

    def transform(x: numpy.ndarray):
        # Shift to the raster origin, then scale meters -> mm.
        return (x - [geoXMin, geoYMin]) / scale

    print3DCoord2D = shapely.transform(geoCoord2D, transformation=transform)

    if returnAsTuple:
        if isinstance(print3DCoord2D, shapely.Point):
            return (print3DCoord2D.x, print3DCoord2D.y)
        else:
            # BUGFIX: message previously named the wrong function (geoCoordToPrint3DCoord)
            print(f'geoCoordToPrint2DCoord: could not return tuple')

    return print3DCoord2D
+
def interpolatePointWithQuadPrint2DCoordsAndCornerElev(interpolatePoint: tuple[float, float], quadPrint2DCoords: list[tuple[float, float]], quadCornerElev: list[float]):
    # TODO(review): unimplemented stub. Presumably meant to interpolate an elevation
    # at interpolatePoint from the quad corner coordinates (NW SW SE NE order, per
    # arrayCellCoordToQuadPrint2DCoords) and their elevations — confirm the intended
    # scheme (bilinear vs. two-triangle planar) before implementing.
    pass
def add_to_stl_list(stl, stl_list):
    """Append *stl* to *stl_list* in place and return the same list for chaining."""
    stl_list += [stl]  # in-place extend; mutates the caller's list like append does
    return stl_list
-
-
def k3d_render_to_html(stl_list, folder, buffer=False):
"""stl_list is either a list of buffers or a list of filenames
folder is the folder where the html file will be saved
@@ -303,7 +411,7 @@ def plot_DEM_histogram(npim, DEM_name, temp_folder):
return plot_file_name
-def dilate_array(raster, dilation_source=None):
+def dilate_array(raster, dilation_source:numpy.ndarray|None=None, dilation_cycles:int=1, limit_mask=None):
'''Will dilate raster (1 cell incl diagonals) with the corresponding cell values of the dilation_source.
If dilation_source is None the dilation will be filled with the 3 x 3 nanmean
returns the dilated raster'''
@@ -313,8 +421,11 @@ def dilate_array(raster, dilation_source=None):
# Convert raster to a binary array, where True represents non-NaN values
nan_mask = ~np.isnan(raster)
- # Perform the binary dilation operation
- dilated_nan_mask = binary_dilation(nan_mask)
+ # Perform the binary dilation operation as many times as specified
+ # generate dilation mask with multiple cycles at once because some locations may be separated by NaN and not reachable with individual dilations
+ dilated_nan_mask = binary_dilation(nan_mask, mask=limit_mask)
+ for _ in range(dilation_cycles-1):
+ dilated_nan_mask = binary_dilation(dilated_nan_mask, mask=limit_mask)
# Create a mask that is True for pixels in the dilation zone that are NaN in the bottom raster
mask = dilated_nan_mask & ~nan_mask
@@ -335,7 +446,7 @@ def dilate_array(raster, dilation_source=None):
# [False False True True]
# [ True True True True]]
- dilated_mask = binary_dilation(mask) # Perform a binary dilation
+ dilated_mask = binary_dilation(mask, mask=limit_mask) # Perform a binary dilation
# [[False True True True]
# [ True True True True]
# [ True True True True]]
diff --git a/touchterrain/server/TouchTerrain_app.py b/touchterrain/server/TouchTerrain_app.py
index afa7ee92..ad9292fd 100644
--- a/touchterrain/server/TouchTerrain_app.py
+++ b/touchterrain/server/TouchTerrain_app.py
@@ -578,7 +578,7 @@ def preflight_generator():
# Create zip and write to tmp
#
try:
- totalsize, full_zip_file_name = TouchTerrainEarthEngine.get_zipped_tiles(**args) # all args are in a dict
+ totalsize, full_zip_file_name = TouchTerrainEarthEngine.get_zipped_tiles(args) # all args are in a dict
except Exception as e:
print("Error:", e, file=sys.stderr)
html = '