diff --git a/.gitignore b/.gitignore
index dd93839..b008c62 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,14 +1,35 @@
-# Logs and databases #
-######################
-slurm-*.out
-output*.out
-ERA5_createForcing_CONUS*.sh
-core.*
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*/__pycache__/
+*.py[cod]
+*$py.class
-# notebook stuff
-.ipynb_checkpoints
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+*.pyc
-# rtd
-rtd/build
+# Sphinx documentation
+docs/_build/
-rtd/source/*.md
\ No newline at end of file
+# test suite
+tests/*/results
+
+# code editors
+.vscode
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..70ee594
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,49 @@
+CWARHM
+=======
+
+**CWARHM** is a Python library to organize workflows to build hydrological models.
+The Package branch is to develop the original SUMMA-CWARHM (currently on main, in this branch moved under dependencies/summa-cwarhm) into a Python package where the code executing workflow steps consists of callable functions, and the actual workflows (here a workflow is all the steps involved in setting up a hydrological model) are scripts calling the available functions in a certain order.
+
+Installation
+--------------
+The dependencies of the CWARHM package (or CWARHM-Assembler - CWARHMA) can be installed using conda:
+
+ ``cd /path/to/summaWorkflow_public``
+
+ ``conda env create -f environment.yml``
+
+ ``conda activate cwarhm-env``
+
+
+Additionally the package can be installed in the environment in developer mode (not needed for the test):
+
+ ``cd /path/to/summaWorkflow_public``
+
+ ``conda activate cwarhm-env``
+
+ ``pip install -e .``
+
+Test Bow at Banff SUMMA
+-----------------------
+As a test case, workflows/summa_bowatbanff/ is available. Before running the test script test_bow_at_banff.py, some path modifications have to be made:
+
+ 1.) In workflows/summa_bowatbanff/control_Bow_at_Banff_test.txt, change the **root_path** modeling domain setting to a local folder where the test results should be saved.
+
+ 2.) In workflows/summa_bowatbanff/test_bow_at_banff.py, change the paths:
+ - cwarhm_summa_folder; to point to the `./summaWorkflow_public/dependencies/cwarhm-summa` folder
+ - results_folder_path , to match the root_path in the control file
+ - test_data_path , path to the test data, if you want to skip the data download workflow steps (by default)
+ - reset_test to True (default), this is a flag that starts a new run (and deletes all data from any results folder), and restarts by copying the test data to the results folder or False: continue with existing data in the results folder.
+
+ 3.) The test data is not part of this repo due to its size (30+GB). For those in the comphyd group it can be found here:
+
+**copernicus** /project/gwf/gwf_cmt/cwarhm_test_data
+**GRAHAM** /project/6008034/CompHydCore/cwarhm_test_data
+
+If this is not accessible, you can download the data with the CWARHM functions (wrapped from the original CWARHM). You can also use a results directory from an earlier test run.
+
+Test Bow at Banff MESH
+----------------------
+The Bow at Banff MESH test performs a part of a complete workflow (with data-specific parts and most of the model-agnostic parts processed by the above workflow). To see the relevant functions, have a look at tests/test_mesh_bowatbanff.py.
+Note that for it to run some input data is needed that is now included in the test folder and will be extracted automatically (21MB).
+Only one path has to be adjusted, but in two(!) places (see tests/test_mesh_bowatbanff.py).
diff --git a/cwarhm/__init__.py b/cwarhm/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/cwarhm/data_specific_processing/__init__.py b/cwarhm/data_specific_processing/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/cwarhm/data_specific_processing/era5.py b/cwarhm/data_specific_processing/era5.py
new file mode 100644
index 0000000..be9b924
--- /dev/null
+++ b/cwarhm/data_specific_processing/era5.py
@@ -0,0 +1,571 @@
+
+import netCDF4 as nc4
+from pathlib import Path
+import time, os
+import math
+import cdsapi # copernicus connection
+import calendar # to find days per month
+import os # to check if file already exists
+from pathlib import Path
+from shutil import copyfile
+from datetime import datetime
+import multiprocessing
+
+from cwarhm.util.util import run_in_parallel
+
+
+def round_coords_to_ERA5(coords):
+ """Round bounding box coordinates to ERA5 resolution
+
+ Note
+ ----
+ from CWARHM by Wouter Knoben
+ https://github.com/CH-Earth/CWARHM/blob/bdd5c388b7f307c6afe1228d4606c6a706fba9d7/3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.py#L29
+
+
+ Parameters
+ ----------
+ coords : list
+ Coordinates in the format [lat_max,lon_min,lat_min,lon_max]
+
+ Returns
+ -------
+ dl_string : str
+ rounded coordinates in format '{lat_min}/{lon_max}/{lat_max}/{lon_min}'
+ rounded_lat : list
+ [lat_max,lat_min]
+ rounded_lon : list
+ [lon_max,lon_min]
+
+ """
+ # Extract values
+ lon = [coords[1],coords[3]]
+ lat = [coords[2],coords[0]]
+
+ # Round to ERA5 0.25 degree resolution
+ rounded_lon = [math.floor(lon[0]*4)/4, math.ceil(lon[1]*4)/4]
+ rounded_lat = [math.floor(lat[0]*4)/4, math.ceil(lat[1]*4)/4]
+
+ # Find if we are still in the representative area of a different ERA5 grid cell
+ if lat[0] > rounded_lat[0]+0.125:
+ rounded_lat[0] += 0.25
+ if lon[0] > rounded_lon[0]+0.125:
+ rounded_lon[0] += 0.25
+ if lat[1] < rounded_lat[1]-0.125:
+ rounded_lat[1] -= 0.25
+ if lon[1] < rounded_lon[1]-0.125:
+ rounded_lon[1] -= 0.25
+
+ # Make a download string
+ dl_string = '{}/{}/{}/{}'.format(rounded_lat[1],rounded_lon[0],rounded_lat[0],rounded_lon[1])
+
+ return dl_string, rounded_lat, rounded_lon
+
+def generate_download_requests(year,bbox,path_to_save_data,target_dataset):
+ """Generate cdsapi requests for one year of ERA5 data in monthly chunks.
+
+ The request list can be downloaded by using :func:wait_for_and_download_requests .
+ The variables requested are the variables needed to run a land-surface scheme
+ (e.g. SUMMA / CLASS / MESH).
+
+
+
+ Note
+ ----
+    Using the download function requires access to cdsapi:
+ - Registration: https://cds.climate.copernicus.eu/user/register?destination=%2F%23!%2Fhome
+ - Setup of the `cdsapi`: https://cds.climate.copernicus.eu/api-how-to
+
+ Adapted from CWARHM by Wouter Knoben
+ https://github.com/CH-Earth/CWARHM/blob/bdd5c388b7f307c6afe1228d4606c6a706fba9d7/3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.py#L29
+
+ and example from ECMWF
+ https://github.com/ecmwf/cdsapi/blob/master/examples/example-era5-update.py
+
+ Requesting the parameters from the pressure_level takes a long time (days rather
+ than hours) because those parameters are stored on slow storage at ECMWF.
+
+ Parameters
+ ----------
+ year : int
+ year to download e.g. 2002
+ bbox : list
+ list of bounding box coordinates [lat_max,lon_min,lat_min,lon_max]
+ path_to_save_data : str
+ path to folder to save data in
+ target_dataset : str
+ either 'pressure_level' or 'surface_level'
+
+ Returns
+ -------
+ request_list : list
+ list with cdsapi requests
+ download_list : list
+ list of path destinations for downloads
+ """
+ download_list = []
+ request_list = []
+ # Find the rounded bounding box
+ coordinates,_,_ = round_coords_to_ERA5(bbox)
+ # connect to Copernicus (requires .cdsapirc file in $HOME)
+ c = cdsapi.Client(debug=True, wait_until_complete=False)
+ # --- Start the month loop
+ for month in range(1,13): # this loops through numbers 1 to 12
+
+ # find the number of days in this month
+ daysInMonth = calendar.monthrange(year,month)
+
+ # compile the date string in the required format. Append 0's to the month number if needed (zfill(2))
+ date = str(year) + '-' + str(month).zfill(2) + '-01/' + \
+ str(year) + '-' + str(month).zfill(2) + '-' + str(daysInMonth[1]).zfill(2)
+
+ # compile the file name string
+ file_path = os.path.join(path_to_save_data,('ERA5_{}_'.format(target_dataset) + str(year) + str(month).zfill(2) + '.nc'))
+
+ # track progress
+ print('Trying to download ' + date + ' into ' + str(file_path))
+
+ # if file doesn't yet exist, download the data
+ if not os.path.isfile(file_path):
+
+ # Make sure the connection is re-tried if it fails
+ retries_max = 1
+ retries_cur = 1
+ while retries_cur <= retries_max:
+ try:
+ # specify and request data
+ if target_dataset == 'pressure_level':
+ request_list.append(
+ c.retrieve('reanalysis-era5-complete', { # do not change this!
+ 'class': 'ea',
+ 'expver': '1',
+ 'stream': 'oper',
+ 'type': 'an',
+ 'levtype': 'ml',
+ 'levelist': '137',
+ 'param': '130/131/132/133',
+ 'date': date,
+ 'time': '00/to/23/by/1',
+ 'area': coordinates,
+ 'grid': '0.25/0.25', # Latitude/longitude grid: east-west (longitude) and north-south resolution (latitude).
+ 'format' : 'netcdf',
+ },)
+ )
+ download_list.append(file_path)
+
+ elif target_dataset == 'surface_level':
+ request_list.append(
+ c.retrieve('reanalysis-era5-single-levels',{
+ 'product_type': 'reanalysis',
+ 'format': 'netcdf',
+ 'variable': [
+ '10m_u_component_of_wind',
+ '10m_v_component_of_wind',
+ '2m_dewpoint_temperature',
+ '2m_temperature',
+ 'mean_surface_downward_long_wave_radiation_flux',
+ 'mean_surface_downward_short_wave_radiation_flux',
+ 'mean_total_precipitation_rate',
+ 'surface_pressure',
+ ],
+ 'date': date,
+ 'time': '00/to/23/by/1',
+ 'area': coordinates, # North, West, South, East. Default: global
+ 'grid': '0.25/0.25', # Latitude/longitude grid: east-west (longitude) and north-south
+ },
+ ))
+ download_list.append(file_path) # file path and name
+ else:
+ print('No valid target. Target is either surface_level or pressure_level')
+
+
+ except Exception as e:
+ print('Error creating request ' + str(file_path) + ' on try ' + str(retries_cur))
+ print(str(e))
+ retries_cur += 1
+ continue
+ else:
+ break
+ return request_list, download_list
+
+def wait_for_and_download_requests(req_list,download_paths,sleep=30):
+ """loop over cdsapi request list and download when ready
+
+ Will end when all downloads are completed
+
+ Parameters
+ ----------
+ req_list : list
+        list of cdsapi requests (from :func:generate_download_requests)
+ download_paths : list
+ list of target file paths matching requests
+ sleep : int, optional
+ time to wait in seconds before checking, by default 30
+ """
+ # initialize all requests as queued
+ conditions = ["queued"]*len(req_list)
+ while any(element in ("queued", "running") for element in conditions):
+ for i,r in enumerate(req_list):
+ #sleep = 30
+ r.update()
+ reply = r.reply
+ # this is logging
+ r.info("Request ID: %s, state: %s" % (reply["request_id"], reply["state"]))
+ # change state
+ conditions[i]=reply["state"]
+
+ if reply["state"] == "completed":
+ print('start download {}'.format(download_paths[i]))
+ r.download(download_paths[i])
+ print('done downloading {}'.format(download_paths[i]))
+ conditions[i]="downloaded"
+ elif reply["state"] in ("queued", "running"):
+ r.info("Request ID: %s, sleep: %s", reply["request_id"], sleep)
+ elif reply["state"] in ("failed",):
+ r.error("Message: %s", reply["error"].get("message"))
+ r.error("Reason: %s", reply["error"].get("reason"))
+ time.sleep(sleep)
+ # delete requests
+ for i,r in enumerate(req_list):
+ r.delete()
+
+def download_one_era5_year(year,bbox,path_to_save_data,target_dataset):
+ """Download one year of ERA5 data in monthly chunks.
+
+ Note
+ ----
+ Adapted from CWARHM by Wouter Knoben
+ https://github.com/CH-Earth/CWARHM/blob/bdd5c388b7f307c6afe1228d4606c6a706fba9d7/3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.py#L29
+
+ Parameters
+ ----------
+ year : int
+ year to download e.g. 2002
+ bbox : list
+ list of bounding box coordinates [lat_max,lon_min,lat_min,lon_max]
+ path_to_save_data : str
+ path to folder to save data in
+ target_dataset : str
+ either 'pressure_level' or 'surface_level'
+ """
+ # Find the rounded bounding box
+ coordinates,_,_ = round_coords_to_ERA5(bbox)
+ # --- Start the month loop
+ for month in range (1,13): # this loops through numbers 1 to 12
+
+ # find the number of days in this month
+ daysInMonth = calendar.monthrange(year,month)
+
+ # compile the date string in the required format. Append 0's to the month number if needed (zfill(2))
+ date = str(year) + '-' + str(month).zfill(2) + '-01/' + \
+ str(year) + '-' + str(month).zfill(2) + '-' + str(daysInMonth[1]).zfill(2)
+
+ # compile the file name string
+ file_path = os.path.join(path_to_save_data,('ERA5_{}_'.format(target_dataset) + str(year) + str(month).zfill(2) + '.nc'))
+
+ # track progress
+ print('Trying to download ' + date + ' into ' + str(file_path))
+
+ # if file doesn't yet exist, download the data
+ if not os.path.isfile(file_path):
+
+ # Make sure the connection is re-tried if it fails
+ retries_max = 1
+ retries_cur = 1
+ while retries_cur <= retries_max:
+ try:
+
+ # connect to Copernicus (requires .cdsapirc file in $HOME)
+ c = cdsapi.Client()
+
+ # specify and retrieve data
+ if target_dataset == 'pressure_level':
+ c.retrieve('reanalysis-era5-complete', { # do not change this!
+ 'class': 'ea',
+ 'expver': '1',
+ 'stream': 'oper',
+ 'type': 'an',
+ 'levtype': 'ml',
+ 'levelist': '137',
+ 'param': '130/131/132/133',
+ 'date': date,
+ 'time': '00/to/23/by/1',
+ 'area': coordinates,
+ 'grid': '0.25/0.25', # Latitude/longitude grid: east-west (longitude) and north-south resolution (latitude).
+ 'format' : 'netcdf',
+ }, file_path)
+
+ elif target_dataset == 'surface_level':
+ c.retrieve('reanalysis-era5-single-levels',{
+ 'product_type': 'reanalysis',
+ 'format': 'netcdf',
+ 'variable': [
+ 'mean_surface_downward_long_wave_radiation_flux',
+ 'mean_surface_downward_short_wave_radiation_flux',
+ 'mean_total_precipitation_rate',
+ 'surface_pressure',
+ ],
+ 'date': date,
+ 'time': '00/to/23/by/1',
+ 'area': coordinates, # North, West, South, East. Default: global
+ 'grid': '0.25/0.25', # Latitude/longitude grid: east-west (longitude) and north-south
+ },
+ file_path) # file path and name
+ else:
+ print('No valid target. Target is either surface_level or pressure_level')
+
+ # track progress
+ print('Successfully downloaded ' + str(file_path))
+
+ except Exception as e:
+ print('Error downloading ' + str(file_path) + ' on try ' + str(retries_cur))
+ print(str(e))
+ retries_cur += 1
+ continue
+ else:
+ break
+
+def run_era5_download_in_parallel(years,bbox,path_to_save_data,target_dataset):
+ """Run download_one_era5_year in parallel
+
+ Parameters
+ ----------
+ years : list
+ years to download e.g. [2002,2003,2004]
+ bbox : list
+ list of bounding box coordinates [lat_max,lon_min,lat_min,lon_max]
+ path_to_save_data : str
+ path to folder to save data in
+ target_dataset : str
+ either 'pressure_level' or 'surface_level'
+ """
+ pool = multiprocessing.Pool()
+ outputs = [pool.apply_async(download_one_era5_year, args=(year, bbox, path_to_save_data, target_dataset)) for year in years]
+ print(outputs)
+ pool.close()
+
+def merge_era5_surface_and_pressure_level_downloads(forcingPath, mergePath, years_str):
+ """Combine separate surface and pressure level downloads
+ Creates a single monthly `.nc` file with SUMMA-ready variables for further processing. # Combines ERA5's `u` and
+ `v` wind components into a single directionless wind vector.
+
+ Note
+ ----
+ This function is no longer needed as merging is more effective using xarray
+
+ :param forcingPath: path to raw ERA5 surface and pressure level data
+ :param mergePath: path to save merged ERA5 data
+    :param years_str: start,end year string from control file (e.g., "2008,2013")
+ """
+
+ # processing
+ years = [int(s) for s in years_str.split(',')]
+ forcingPath = Path(forcingPath)
+ mergePath = Path(mergePath)
+ os.makedirs(mergePath, exist_ok=True)
+
+ # --- Merge the files
+ # Loop through all years and months
+ for year in range(years[0] ,years[1 ] +1):
+ for month in range (1 ,13):
+
+ # Define file names
+ data_pres = 'ERA5_pressureLevel137_' + str(year) + str(month).zfill(2) + '.nc'
+ data_surf = 'ERA5_surface_' + str(year) + str(month).zfill(2) + '.nc'
+ data_dest = 'ERA5_merged_' + str(year) + str(month).zfill(2) + '.nc'
+
+ # Step 1: convert lat/lon in the pressure level file to range [-180,180], [-90,90]
+ # Extract the variables we need for the similarity check in a way that closes the files implicitly
+ with nc4.Dataset(forcingPath / data_pres) as src1, nc4.Dataset(forcingPath / data_surf) as src2:
+ pres_lat = src1.variables['latitude'][:]
+ pres_lon = src1.variables['longitude'][:]
+ pres_time = src1.variables['time'][:]
+ surf_lat = src2.variables['latitude'][:]
+ surf_lon = src2.variables['longitude'][:]
+ surf_time = src2.variables['time'][:]
+
+ # Update the pressure level coordinates
+ pres_lat[pres_lat > 90] = pres_lat[pres_lat > 90] - 180
+ pres_lon[pres_lon > 180] = pres_lon[pres_lon > 180] - 360
+
+ # Step 2: check that coordinates and time are the same between the both files
+ # Compare dimensions (lat, long, time)
+ flag_loc_and_time_same = [all(pres_lat == surf_lat), all(pres_lon == surf_lon), all(pres_time == surf_time)]
+
+ # Check that they are all the same
+ if not all(flag_loc_and_time_same):
+ err_txt = 'Dimension mismatch while merging ' + data_pres + ' and ' + data_surf + '. Check latitude, longitude and time dimensions in both files. Continuing with next files.'
+ print(err_txt)
+ continue
+
+ # Step 3: combine everything into a single .nc file
+ # Order of writing things:
+ # - Meta attributes from both source files
+ # - Dimensions (lat, lon, time)
+ # - Variables: long, lat and time
+ # - Variables: forcing at surface
+ # - Variables: forcing at pressure level 137
+
+ # Define the variables we want to transfer
+ variables_surf_transfer = ['longitude' ,'latitude' ,'time']
+ variables_surf_convert = ['sp' ,'mtpr' ,'msdwswrf' ,'msdwlwrf']
+ variables_pres_convert = ['t' ,'q']
+ attr_names_expected = ['scale_factor' ,'add_offset' ,'_FillValue' ,'missing_value' ,'units' ,'long_name'
+ ,'standard_name'] # these are the attributes we think each .nc variable has
+ loop_attr_copy_these = ['units' ,'long_name'
+ ,'standard_name'] # we will define new values for _FillValue and missing_value when writing the .nc variables' attributes
+
+ # Open the destination file and transfer information
+ with nc4.Dataset(forcingPath / data_pres) as src1, nc4.Dataset(forcingPath / data_surf) as src2, nc4.Dataset \
+ (mergePath / data_dest, "w") as dest:
+
+ # === Some general attributes
+ dest.setncattr('History' ,'Created ' + time.ctime(time.time()))
+ dest.setncattr('Language' ,'Written using Python')
+ dest.setncattr('Reason'
+ ,'(1) ERA5 surface and pressure files need to be combined into a single file (2) Wind speed U and V components need to be combined into a single vector (3) Forcing variables need to be given to SUMMA without scale and offset')
+
+ # === Meta attributes from both sources
+ for name in src1.ncattrs():
+ dest.setncattr(name + ' (pressure level (10m) data)', src1.getncattr(name))
+ for name in src2.ncattrs():
+ dest.setncattr(name + ' (surface level data)', src1.getncattr(name))
+
+ # === Dimensions: latitude, longitude, time
+ # NOTE: we can use the lat/lon from the surface file (src2), because those are already in proper units. If there is a mismatch between surface and pressure we shouldn't have reached this point at all due to the check above
+ for name, dimension in src2.dimensions.items():
+ if dimension.isunlimited():
+ dest.createDimension( name, None)
+ else:
+ dest.createDimension( name, len(dimension))
+
+ # === Get the surface level generic variables (lat, lon, time)
+ for name, variable in src2.variables.items():
+
+ # Transfer lat, long and time variables because these don't have scaling factors
+ if name in variables_surf_transfer:
+ dest.createVariable(name, variable.datatype, variable.dimensions, fill_value = -999)
+ dest[name].setncatts(src1[name].__dict__)
+ dest.variables[name][:] = src2.variables[name][:]
+
+ # === For the forcing variables, we need to:
+ # 1. Extract them (this automatically applies scaling and offset with nc4) and apply non-negativity constraints
+ # 2. Create a .nc variable with the right SUMMA name and file type
+ # 3. Put all data into the new .nc file
+
+ # === Transfer the surface level data first, for no particular reason
+ # This should contain surface pressure (sp), downward longwave (msdwlwrf), downward shortwave (msdwswrf) and precipitation (mtpr)
+ for name, variable in src2.variables.items():
+
+ # Check that we are only using the names we expect, and thus the names for which we have the required code ready
+ if name in variables_surf_convert:
+
+ # 0. Reset the dictionary that we keep attribute values in
+ loop_attr_source_values = {name: 'n/a' for name in attr_names_expected}
+
+ # 1a. Get the values of this variable from the source (this automatically applies scaling and offset)
+ loop_val = variable[:]
+
+ # 1b. Apply non-negativity constraint. This is intended to remove very small negative data values that sometimes occur
+ loop_val[loop_val < 0] = 0
+
+ # 1c. Get the attributes for this variable from source
+ for attrname in variable.ncattrs():
+ loop_attr_source_values[attrname] = variable.getncattr(attrname)
+
+ # 2a. Find what this ERA5 variable should be called in SUMMA
+ if name == 'sp':
+ name_summa = 'airpres'
+ elif name == 'msdwlwrf':
+ name_summa = 'LWRadAtm'
+ elif name == 'msdwswrf':
+ name_summa = 'SWRadAtm'
+ elif name == 'mtpr':
+ name_summa = 'pptrate'
+ else:
+ name_summa = 'n/a/' # no name so we don't start overwriting data if a new name is not defined for some reason
+
+ # 2b. Create the .nc variable with the proper SUMMA name
+                        # Inputs: variable name as needed by SUMMA; data type: 'float'; dimensions; no need for fill value, because the variable gets populated in this same script
+ dest.createVariable(name_summa, 'f4', ('time' ,'latitude' ,'longitude'), fill_value = False)
+
+ # 3a. Select the attributes we want to copy for this variable, based on the dictionary defined before the loop starts
+ loop_attr_copy_values = {use_this: loop_attr_source_values[use_this] for use_this in loop_attr_copy_these}
+
+ # 3b. Copy the attributes FIRST, so we don't run into any scaling/offset issues
+ dest[name_summa].setncattr('missing_value' ,-999)
+ dest[name_summa].setncatts(loop_attr_copy_values)
+
+ # 3c. Copy the data SECOND
+ dest[name_summa][:] = loop_val
+
+ # === Transfer the pressure level variables next, using the same procedure as above
+ for name, variable in src1.variables.items():
+ if name in variables_pres_convert:
+
+ # 0. Reset the dictionary that we keep attribute values in
+ loop_attr_source_values = {name: 'n/a' for name in attr_names_expected}
+
+ # 1a. Get the values of this variable from the source (this automatically applies scaling and offset)
+ loop_val = variable[:]
+
+ # 1b. Get the attributes for this variable from source
+ for attrname in variable.ncattrs():
+ loop_attr_source_values[attrname] = variable.getncattr(attrname)
+
+ # 2a. Find what this ERA5 variable should be called in SUMMA
+ if name == 't':
+ name_summa = 'airtemp'
+ elif name == 'q':
+ name_summa = 'spechum'
+ elif name == 'u':
+ name_summa = 'n/a/' # we shouldn't reach this part of the code, because 'u' is not specified in 'variables_pres_convert'
+ elif name == 'v':
+ name_summa = 'n/a' # as with 'u', because both are needed to calculate total wind speed first
+ else:
+ name_summa = 'n/a/' # no name so we don't start overwriting data if a new name is not defined for some reason
+
+ # 2b. Create the .nc variable with the proper SUMMA name
+                        # Inputs: variable name as needed by SUMMA; data type: 'float'; dimensions; no need for fill value, because the variable gets populated in this same script
+ dest.createVariable(name_summa, 'f4', ('time' ,'latitude' ,'longitude'), fill_value = False)
+
+ # 3a. Select the attributes we want to copy for this variable, based on the dictionary defined before the loop starts
+ loop_attr_copy_values = {use_this: loop_attr_source_values[use_this] for use_this in loop_attr_copy_these}
+
+ # 3b. Copy the attributes FIRST, so we don't run into any scaling/offset issues
+ dest[name_summa].setncattr('missing_value' ,-999)
+ dest[name_summa].setncatts(loop_attr_copy_values)
+
+ # 3c. Copy the data SECOND
+ dest[name_summa][:] = loop_val
+
+ # === Calculate combined wind speed and store
+ # 1a. Get the values of this variable from the source (this automatically applies scaling and offset)
+ pres_u = src1.variables['u'][:]
+ pres_v = src1.variables['v'][:]
+
+ # 1b. Create the variable attribute 'units' from the source data. This lets us check if the source units match (they should match)
+ unit_u = src1.variables['u'].getncattr('units')
+ unit_v = src1.variables['v'].getncattr('units')
+ unit_w = '(({})**2 + ({})**2)**0.5'.format(unit_u ,unit_v)
+
+ # 2a. Set the summa_name
+ name_summa = 'windspd'
+
+ # 2b. Create the .nc variable with the proper SUMMA name
+                # Inputs: variable name as needed by SUMMA; data type: 'float'; dimensions; no need for fill value, because the variable gets populated in this same script
+ dest.createVariable(name_summa ,'f4' ,('time' ,'latitude' ,'longitude') ,fill_value = False)
+
+ # 3a. Set the attributes FIRST, so we don't run into any scaling/offset issues
+ dest[name_summa].setncattr('missing_value' ,-999)
+ dest[name_summa].setncattr('units' ,unit_w)
+ dest[name_summa].setncattr('long_name'
+ ,'wind speed at the measurement height, computed from ERA5 U and V-components')
+ dest[name_summa].setncattr('standard_name' ,'wind_speed')
+
+ # 3b. Copy the data SECOND
+ # Creating a new variable first and writing to .nc later seems faster than directly writing to .nc
+ pres_w = ((pres_u**2)+(pres_v**2))**0.5
+ dest[name_summa][:] = pres_w
+
+ print('Finished merging {} and {} into {}'.format(data_surf ,data_pres ,data_dest))
diff --git a/cwarhm/data_specific_processing/merit.py b/cwarhm/data_specific_processing/merit.py
new file mode 100644
index 0000000..1a62af7
--- /dev/null
+++ b/cwarhm/data_specific_processing/merit.py
@@ -0,0 +1,182 @@
+from pathlib import Path
+import numpy as np
+import requests
+import shutil
+import os
+from osgeo import gdal
+import glob
+import tarfile
+
+def all_merit_variables():
+ """Returns a list of all MERIT variables
+
+ :return: List of all MERIT variables available for download
+ :rtype: List
+ """
+ return ['dir','elv','upa','upg','wth','hnd']
+
+def download_merit(target_folder,credentials: dict,variables=['elv'],bbox: list=None,retries_max=10,
+ merit_url='http://hydro.iis.u-tokyo.ac.jp/~yamadai/MERIT_Hydro/distribute/v1.0.1/'):
+ """Downloads MERIT data from source. Adapted from CWARHM (Knoben et Al., 2022).
+
+    :param target_folder: root of target folder to save downloaded MERIT data
+ :type target_folder: string
+ :param credentials: credentials to download MERIT data of the form dict(user='user',pass='pass')
+ :type credentials: dict
+ :param variables: List of variables to download, defaults to ['elv']
+ :type variables: list, optional
+ :param bbox: bounding box in lat,lon to download subsection of spatial extent [xmin,ymin,xmax,ymax], defaults to None
+ :type bbox: list, optional
+ :param retries_max: number of retries for downloading a file, defaults to 10
+ :type retries_max: int, optional
+ :param merit_url: base url of download source, defaults to 'http://hydro.iis.u-tokyo.ac.jp/~yamadai/MERIT_Hydro/distribute/v1.0.1/'
+ :type merit_url: str, optional
+ """
+
+ # define all possible variables
+ if variables=='all':
+ variables=all_merit_variables()
+
+ # Define the edges of the download areas
+ lon_right_edge = np.array([-150,-120, -90,-60,-30, 0,30,60,90,120,150,180])
+ lon_left_edge = np.array([-180,-150,-120,-90,-60,-30, 0,30,60, 90,120,150])
+ lat_bottom_edge = np.array([-60,-30,0, 30,60]) # NOTE: latitudes -90 to -60 are NOT part of the MERIT domain
+ lat_top_edge = np.array([-30, 0,30,60,90])
+ # list all the tile combinations
+ dl_lon_all = np.array(['w180','w150','w120','w090','w060','w030','e000','e030','e060','e090','e120','e150'])
+ dl_lat_all = np.array(['s60','s30','n00','n30','n60'])
+
+ if bbox:
+ domain_min_lon = np.array(float(bbox[0])) #xmin
+ domain_min_lat = np.array(float(bbox[1])) #ymin
+ domain_max_lon = np.array(float(bbox[2])) #xmax
+ domain_max_lat = np.array(float(bbox[3])) #ymax
+
+ # Find the lower-left corners of each download square
+ dl_lons = dl_lon_all[(domain_min_lon < lon_right_edge) & (domain_max_lon > lon_left_edge)]
+ dl_lats = dl_lat_all[(domain_min_lat < lat_top_edge) & (domain_max_lat > lat_bottom_edge)]
+
+ else:
+ dl_lons = dl_lon_all
+ dl_lats = dl_lat_all
+ print('dl_lons: {} | dl_lats: {}'.format(dl_lons,dl_lats) )
+ # Loop over the download files
+ for variable in variables:
+ print('downloading variable {}'.format(variable))
+ for dl_lon in dl_lons:
+ print('for tile lon {}'.format(dl_lon))
+ for dl_lat in dl_lats:
+ print('and tile lat {}'.format(dl_lat))
+ # Skip those combinations for which no MERIT data exists
+ if (dl_lat == 'n00' and dl_lon == 'w150') or \
+ (dl_lat == 's60' and dl_lon == 'w150') or \
+ (dl_lat == 's60' and dl_lon == 'w120'):
+ continue
+
+ # Make the download URL
+ file_url = (merit_url + '{}_{}{}.tar').format(variable,dl_lat,dl_lon)
+
+ # Extract the filename from the URL
+ file_name = file_url.split('/')[-1].strip() # Get the last part of the url, strip whitespace and characters
+ # set full download file target path
+ target_dir = Path(target_folder+'/{}/'.format(variable))
+ target_dir.mkdir(parents=True,exist_ok=True)
+ target_path = Path(target_folder+'/{}/'.format(variable)+file_name)
+ # If file already exists in destination, move to next file
+ if os.path.isfile(target_path):
+ print('{} already exist, skipping download'.format(target_path))
+ continue
+ # Make sure the connection is re-tried if it fails
+ retries_cur = 1
+ while retries_cur <= retries_max:
+ try:
+
+ # Send a HTTP request to the server and save the HTTP response in a response object called resp
+ # 'stream = True' ensures that only response headers are downloaded initially (and not all file contents too, which are 2GB+)
+ with requests.get(file_url.strip(), auth=(credentials['user'],credentials['pass']), stream=True) as response:
+
+ # Decode the response
+ response.raw.decode_content = True
+ content = response.raw
+
+ # Write to file
+ with open(target_path, 'wb') as data:
+ shutil.copyfileobj(content, data)
+
+ # print a completion message
+ print('Successfully downloaded {}'.format(target_path))
+ break
+ except Exception as e:
+ print('Error downloading ' + file_url + ' on try ' + str(retries_cur) + ' with error: ' + str(e))
+ retries_cur += 1
+ continue
+ else:
+ break
+
+def extract_merit_tars(data_path,extracted_path,variables=['']):
+ """Extract MERIT zipped data downloaded by download_merit
+
+ :param data_path: root path to MERIT data
+ :type data_path: string
+ :param extracted_path: root path to extracted MERIT data to
+ :type extracted_path: string
+ :param variables: List of variables to extract. 'all' will generate a list of all the MERIT variables, defaults to ['']
+ :type variables: list, optional
+ """
+ # define all possible variables
+ if variables=='all':
+ variables=all_merit_variables()
+ print('variables to extract are {}'.format(variables))
+ # loop over variables
+ for variable in variables:
+ print('extracting variable {}'.format(variable))
+ # set variable as folder for extracted files
+ extracted_path_variable = Path(extracted_path+'/'+variable)
+ # make folder if does not exist
+ extracted_path_variable.mkdir(parents=True,exist_ok=True)
+ # list all tarfiles
+ tarfiles = glob.glob(data_path+'/{}*/'.format(variable)+'/*.tar')
+ print(tarfiles)
+ # loop over all tarfiles and extract
+ for tar_file in tarfiles:
+ with tarfile.open(tar_file) as my_tar:
+ my_tar.extractall(extracted_path_variable)
+
+def build_merit_vrt(data_path_in, vrt_out_dir, variables=[''], **build_options):
+ """Build gdal virtual data set (vrt) from extracted MERIT variables.
+
+ :param data_path_in: Root path of extracted MERIT data
+ :type data_path_in: string
+ :param vrt_out_dir: Root path to save vrts to
+ :type vrt_out_dir: string
+ :param variables: List of MERIT variables to build vrt for, defaults to ['']. 'all' generates a list of all MERIT variables
+ :type variables: list, optional
+ """
+ # define all possible variables
+ if variables=='all':
+ variables=all_merit_variables()
+ print('variables to process vrt: {}'.format(variables))
+ for variable in variables:
+
+ # set list of files to make into vrt
+ tifslist = glob.glob(data_path_in+'/{}*/'.format(variable)+'/*/'+'*.tif')
+ print(tifslist)
+ # construct vrt out path
+ vrt_out = vrt_out_dir+'/{}/'.format(variable)+'/{}.vrt'.format(variable)
+ # Make folder structure for vrt_out
+ vrt_dir= Path(os.path.dirname(vrt_out))
+ vrt_dir.mkdir(parents=True,exist_ok=True)
+ print('result saved as {}'.format(vrt_out))
+ # execute gdal build to make vrt
+ vrt_options = gdal.BuildVRTOptions(**build_options)
+ my_vrt = gdal.BuildVRT(vrt_out, tifslist, options=vrt_options)
+ my_vrt.FlushCache()
+
+
+
+credentials = dict(user='hydrography',password='rivernetwork')
+
+#test bow at banff download_merit('/Users/ayx374/data/merit_hydro',credentials,bbox=[-116.55,50.95,-115.52,51.74])
+#download_merit('/home/ayx374/projects/rpp-kshook/CompHydCore/merit_hydro/raw_data',credentials,variables='all')
+#extract_merit_tars('/home/ayx374/projects/rpp-kshook/CompHydCore/merit_hydro/raw_data','/home/ayx374/projects/rpp-kshook/CompHydCore/merit_hydro/extracted',variables='all')
+#build_merit_vrt('/home/ayx374/projects/rpp-kshook/CompHydCore/merit_hydro/extracted','/home/ayx374/projects/rpp-kshook/CompHydCore/merit_hydro/vrts',variables='all',resolution='highest')
\ No newline at end of file
diff --git a/cwarhm/data_specific_processing/modis.py b/cwarhm/data_specific_processing/modis.py
new file mode 100644
index 0000000..e69de29
diff --git a/cwarhm/data_specific_processing/soilgrids.py b/cwarhm/data_specific_processing/soilgrids.py
new file mode 100644
index 0000000..e69de29
diff --git a/cwarhm/model_agnostic_processing/HRU.py b/cwarhm/model_agnostic_processing/HRU.py
new file mode 100644
index 0000000..348e80c
--- /dev/null
+++ b/cwarhm/model_agnostic_processing/HRU.py
@@ -0,0 +1,249 @@
+'''Functions that relate to aggregating to HRUs:
+Hydrological Response Units'''
+
+import xarray as xr
+import rasterio
+import geopandas as gpd
+import numpy as np
+import pandas as pd
+from rasterstats import zonal_stats
+import datetime
+from easymore.easymore import easymore
+
def hru_zonal_statistics(input_raster: str, input_shape: str,
                         input_ddb: str, out_parameter: str):
    """Compute per-GRU majority zonal statistics and save them as a MESH parameter netCDF.

    WIP: from MESH workflow see
    https://wiki.usask.ca/display/MESH/MESH+vector-based+workflow+using+EASYMORE#MESHvectorbasedworkflowusingEASYMORE-2.3.Calculatelandcoverzonalhistogram

    The raster's first band is sampled with a 'majority' statistic for every
    polygon in ``input_shape``; results are reordered to match the subbasin
    order of the drainage database and written to ``out_parameter``.

    :param input_raster: path to the gridded class raster (band 1 is used)
    :type input_raster: str
    :param input_shape: path to the GRU polygon shapefile (must contain a 'COMID' column)
    :type input_shape: str
    :param input_ddb: path to the MESH drainage database netCDF (must contain 'hruId', 'lon', 'lat', 'crs')
    :type input_ddb: str
    :param out_parameter: path of the netCDF file to write
    :type out_parameter: str
    """

    #%% reading the inputs
    gridded_class_data = rasterio.open(input_raster)
    gru_shapes = gpd.read_file(input_shape)
    drainage_db = xr.open_dataset(input_ddb)

    # %% extract indices of lc based on the drainage database
    # build 'ind' so that shapefile rows can be reordered into the
    # drainage-database (Rank) order via zs.values[ind]
    n = len(drainage_db.hruId)
    ind = []
    hruid = drainage_db.variables['hruId']

    for i in range(n):
        fid = np.where(np.int32(gru_shapes['COMID'].values) == hruid[i].values)[0]
        ind = np.append(ind, fid)

    ind = np.int32(ind)

    #%% Read the raster values (band 1; e.g. a sand-content or class grid)
    class_band = gridded_class_data.read(1)

    # Get the affine transform so zonal_stats can georeference the array
    affine = gridded_class_data.transform

    #%% calculate zonal stats: dominant (majority) raster value per polygon
    zs = zonal_stats(gru_shapes, class_band, affine=affine, stats='majority')
    zs = pd.DataFrame(zs)

    # reorder the zonal stats from Rank1 to RankN
    zs_reorder = zs.values[ind]

    # %% convert the distributed parameters as a dataset and save it as netcdf
    lon = drainage_db['lon'].values
    lat = drainage_db['lat'].values

    dist_param = xr.Dataset(
        {
            "GRU": (["subbasin", "gru"], zs_reorder),
        },
        coords={
            "lon": (["subbasin"], lon),
            "lat": (["subbasin"], lat),
        },
    )

    # meta data attributes
    dist_param.attrs['Conventions'] = 'CF-1.6'
    # bug fix: this module does `import datetime`, so the class must be
    # qualified — bare `datetime.now()` raises AttributeError on the module
    dist_param.attrs['history'] = 'Created ' + datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
    dist_param.attrs['featureType'] = 'point'

    # editing lat attribute
    dist_param['lat'].attrs['standard_name'] = 'latitude'
    dist_param['lat'].attrs['units'] = 'degrees_north'
    dist_param['lat'].attrs['axis'] = 'Y'

    # editing lon attribute
    dist_param['lon'].attrs['standard_name'] = 'longitude'
    dist_param['lon'].attrs['units'] = 'degrees_east'
    dist_param['lon'].attrs['axis'] = 'X'

    # coordinate system copied from the drainage database
    dist_param['crs'] = drainage_db['crs'].copy()

    dist_param.to_netcdf(out_parameter)
+
def hru_fraction_from_counts(basin_class_counts: "gpd.geodataframe.GeoDataFrame"):
    """calculate fractions based on counts across classes in polygon

    Assumes the columns (names not important) [GRU_ID, HRU_ID,
    center_lat, center_lon, HRU_area]. And then the columns that have
    the count for each class, last column is the geometries.
    This is the shapefile as generated by 3_find_HRU_land_classes.py
    in cwarhm-summa.

    :param basin_class_counts: geopandas dataframe with counts for each class (see long description)
    :type basin_class_counts: geopandas.geodataframe.GeoDataFrame
    :return: geopandas dataframe with fractions for each class (count columns renamed to '<name>_frac')
    :rtype: geopandas.geodataframe.GeoDataFrame
    """

    # this can be changed to select the columns based on an identifier;
    # columns 0-4 are ids/coords/area, the last column is the geometry
    count_columns = basin_class_counts.iloc[:, 5:-1]
    # convert to array for calculation
    count_columns_array = count_columns.values
    # total counts per basin - a 1D array
    total_counts_per_basin = count_columns.sum(axis=1).values
    # calculate fraction per basin of each landclass:
    # the first transpose makes numpy divide each element in a row by the sum,
    # the second transpose restores the original row/column layout
    fraction_per_basin = np.divide(count_columns_array.transpose(), total_counts_per_basin).transpose()
    # build the new header list: keep id columns, rename count columns
    new_frac_headers = [c + '_frac' for c in count_columns.columns]
    new_headers = list(basin_class_counts.columns[0:5]) + list(new_frac_headers) + ['geometry']
    # make hard copy of the old frame so the input is left untouched
    fraction_per_basin_gdf = basin_class_counts.copy()
    # replace headers
    fraction_per_basin_gdf.columns = new_headers
    # replace column values with the computed fractions
    fraction_per_basin_gdf.iloc[:, 5:-1] = fraction_per_basin

    return fraction_per_basin_gdf
+
def gru_fraction_from_hru_counts(basin_class_counts: "gpd.geodataframe.GeoDataFrame"):
    """calculate class fraction for each GRU

    Assumes the columns (name of GRU_ID must match) [GRU_ID, HRU_ID,
    center_lat, center_lon, HRU_area]. And then the columns that have
    the count for each class for each HRU, last column is the geometries.
    This is the shapefile as generated by 3_find_HRU_land_classes.py
    in cwarhm-summa.

    :param basin_class_counts: a geopandas dataframe with GRU_IDs, and counts per HRU_IDs (see long description)
    :type basin_class_counts: gpd.geodataframe.GeoDataFrame
    :return: data frame indexed by GRU_ID with fractions per class type as columns
    :rtype: pandas.core.frame.DataFrame
    """
    # select the count columns; .copy() so adding GRU_ID below does not
    # mutate (or warn about) a view on the caller's dataframe
    counts = basin_class_counts.iloc[:, 5:-1].copy()
    counts['GRU_ID'] = basin_class_counts['GRU_ID'].values
    # sum counts over all HRUs belonging to each GRU (one row per GRU,
    # one column per class)
    class_sums = counts.groupby('GRU_ID').sum()
    # divide each row by its total to get per-GRU class fractions
    fractions = class_sums.div(class_sums.sum(axis=1), axis=0)

    return fractions
+
def map_forcing_data(basin, forcing_data, output_dir,
        var_names: list, var_lon='lon', var_lat='lat', var_time='time',
        case_name='workflow', temp_dir='./esmr_temp/', format_list=None,
        fill_value_list=None, save_csv=False,
        **esmr_kwargs
        ):
    """Wrapper to create and run an EASYMORE remapping with default options

    Parameters
    ----------
    basin : str
        path to the shapefile with the basins
    forcing_data : str
        path to the gridded forcing netcdf file
    output_dir : str
        path of the output directory to store remapped nc files
    var_names : list
        list with variable names to remap
    var_lon : str, optional
        name of the longitude dimension in nc file, by default 'lon'
    var_lat : str, optional
        name of latitude dimension in nc file, by default 'lat'
    var_time : str, optional
        name of time dimension in nc file, by default 'time'
    case_name : str, optional
        name of the case, by default 'workflow'
    temp_dir : str, optional
        directory to store intermediate products in, by default './esmr_temp/'
    format_list : list, optional
        format of the variables to be saved in remapped files, by default ['f4']
    fill_value_list : list, optional
        fill value in remapped file, by default ['-999']
    save_csv : bool, optional
        flag to save results as csv in addition to nc, by default False
    esmr_kwargs :
        extra EASYMORE settings; currently only 'var_names_remapped' is used
    """
    # None-sentinel defaults avoid the shared-mutable-default-argument pitfall
    if format_list is None:
        format_list = ['f4']
    if fill_value_list is None:
        fill_value_list = ['-999']

    print(esmr_kwargs)
    # %% initializing EASYMORE object
    esmr = easymore()

    # specifying EASYMORE objects
    # name of the case; the temporary, remapping and remapped file names include case name
    esmr.case_name = case_name
    # temporary path that the EASYMORE generated GIS files and remapped file will be saved
    esmr.temp_dir = temp_dir
    # name of target shapefile that the source netcdf files should be remapped to
    esmr.target_shp = basin

    # name of netCDF file(s); multiple files can be specified with *
    esmr.source_nc = forcing_data

    esmr.var_names = var_names
    # rename the variables from source netCDF file(s) in the remapped files;
    # it will be the same as source if not provided
    # esmr.var_names_remapped = ['PR','RDRS_v2_P_FI_SFC','FB','RDRS_v2_P_TT_09944','UV','RDRS_v2_P_P0_SFC','HU']
    if 'var_names_remapped' in esmr_kwargs:
        print('renaming variables')
        esmr.var_names_remapped = esmr_kwargs['var_names_remapped']

    # name of variable longitude in source netCDF files
    esmr.var_lon = var_lon
    # name of variable latitude in source netCDF files
    esmr.var_lat = var_lat
    # name of variable time in source netCDF file; should be always time
    esmr.var_time = var_time
    # location where the remapped netCDF file will be saved
    esmr.output_dir = output_dir
    # format of the variables to be saved in remapped files,
    # if one format provided it will be expanded to other variables
    esmr.format_list = format_list
    # fill values of the variables to be saved in remapped files,
    # if one value provided it will be expanded to other variables
    esmr.fill_value_list = fill_value_list
    # if required that the remapped values to be saved as csv as well
    esmr.save_csv = save_csv

    # execute EASYMORE
    # Note: remapped forcing has the precision of float32
    esmr.nc_remapper()
\ No newline at end of file
diff --git a/cwarhm/model_agnostic_processing/__init__.py b/cwarhm/model_agnostic_processing/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/cwarhm/model_specific_processing/__init__.py b/cwarhm/model_specific_processing/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/cwarhm/model_specific_processing/mesh.py b/cwarhm/model_specific_processing/mesh.py
new file mode 100644
index 0000000..89b3d6e
--- /dev/null
+++ b/cwarhm/model_specific_processing/mesh.py
@@ -0,0 +1,1518 @@
+'''functions to write MESH specific input files'''
+
+import cwarhm.model_specific_processing.mizuroute as mizu
+import pandas as pd
+import netCDF4 as nc4
+import geopandas as gpd
+import xarray as xr
+import numpy as np
+from datetime import datetime
+from datetime import date
+import warnings
+import ntpath
+
+
def generate_mesh_topology(infile_river_shp, infile_basin_shp, outfile_topology_nc, river_outlet_ids,
                basin_hru_id = 'COMID', basin_hru_to_seg = 'hru_to_seg', basin_hru_area = 'area',
                river_seg_id = 'COMID', river_down_seg_id = 'NextDownID', river_slope = 'slope',
                river_length = 'length' , fake_river=False):
    """generate mesh topology .nc file

    This function is based on :py:func:mizuroute:generate_mizuroute_topology
    The network topology contains information about the stream network and the routing basins the network is in. These include:

    1. Unique indices of the stream segment;
    2. Unique indices of the routing basins (HRUs; equivalent to SUMMA GRUs in this setup);
    3. ID of the stream segment each individual segment connects to (should be 0 or negative number to indicate that segment is an outlet);
    4. ID of the stream segment a basin drains into;
    5. Basin area;
    6. Segment slope;
    7. Segment length.

    Values for these settings are taken from the user's shapefiles. See: https://mizuroute.readthedocs.io/en/master/Input_data.html

    :param infile_river_shp: path to river shapefile
    :type infile_river_shp: file path .shp
    :param infile_basin_shp: path to basin shapefile
    :type infile_basin_shp: file path .shp
    :param outfile_topology_nc: path to save output netCDF file
    :type outfile_topology_nc: file path .nc
    :param river_outlet_ids: river_seg_ids that need to be set as outlet, comma seperated for multiple
    :type river_outlet_ids: str
    :param basin_hru_id: name of the routing basin id column in :param:infile_basin_shp , defaults to 'COMID'
    :type basin_hru_id: str, optional
    :param basin_hru_to_seg: name of the column that shows which river segment each HRU connects to, defaults to 'hru_to_seg'
    :type basin_hru_to_seg: str, optional
    :param basin_hru_area: Name of the catchment area column. Area must be in units [m^2], defaults to 'area'
    :type basin_hru_area: str, optional
    :param river_seg_id: Name of the segment ID column in :param:infile_river_shp, defaults to 'COMID'
    :type river_seg_id: str, optional
    :param river_down_seg_id: Name of the downstream segment ID column, defaults to 'NextDownID'
    :type river_down_seg_id: str, optional
    :param river_slope: Name of the slope column. Slope must be in in units [length/length]., defaults to 'slope'
    :type river_slope: str, optional
    :param river_length: Name of the segment length column. Length must be in units [m], defaults to 'length'
    :type river_length: str, optional
    :param fake_river: Flag to attempt creating fake river network for headwater basins , defaults to False
        NOTE(review): this flag is accepted and documented but not referenced anywhere
        in the function body — confirm whether the fake-river behavior was meant to
        be ported from the mizuRoute version.
    :type fake_river: bool, optional
    """

    shp_basin = gpd.read_file(infile_basin_shp)
    shp_river = gpd.read_file(infile_river_shp)

    # Added by MESH workflow
    # sort basin to be consistent with river; the per-element order of the
    # basin and river frames must match because both are written on the same
    # 'n' dimension below
    shp_basin = shp_basin.sort_values(by=basin_hru_id)

    # convert area to m^2
    # Note: if area unit is already based on m**2, it is not requried to covert m**2
    # shp_basin[basin_hru_area].values[:] = shp_basin[basin_hru_area].values[:]*10**6

    # covert river_length to m
    # Note: if length unit is already based on m, it is not requried to covert m
    # shp_river[river_length].values[:] = shp_river[river_length].values[:]*1000

    # adding centroid of each subbasin.
    # Note: the more accurate should be done in equal area projection
    # (centroid on a geographic CRS triggers a geopandas warning, hence the
    # temporary warning filter)
    warnings.simplefilter('ignore') # silent the warning
    shp_basin['lon'] = shp_basin.centroid.x
    shp_basin['lat'] = shp_basin.centroid.y
    warnings.simplefilter('default') # back to normal

    # specifying other variables
    # Note: the river width and manning is optional. The manning coefficient is specified in the MESH
    # hydrology configuration file
    shp_river['width'] = 50
    shp_river['manning'] = 0.03

    #%% Find the number of segments and subbasins
    num_seg = len(shp_river)
    num_hru = len(shp_basin)

    # finished edit by MESH workflow

    # Ensure that the most downstream segment in the river network has a downstream_ID of 0
    # This indicates to mizuRoute that this segment has no downstream segment attached to it
    # (helper from the mizuroute module; mutates shp_river in place)
    mizu.enforce_outlets_from_control(shp_river, river_outlet_ids, river_seg_id, river_down_seg_id)
    # Make the netcdf file
    with nc4.Dataset(outfile_topology_nc, 'w', format='NETCDF4') as ncid:
        # Set general attributes
        now = datetime.now()
        ncid.setncattr('Author', "Created by MESH vector-based workflow scripts")
        ncid.setncattr('History','Created ' + now.strftime('%Y/%m/%d %H:%M:%S'))
        ncid.setncattr('Purpose','Create a river network .nc file for WATROUTE routing')
        # Define the seg and hru dimensions
        # it can be renamed to 'subbasin'
        # Added by MESH workflow: a single shared dimension 'n' is used for both
        # segments and basins (they are index-aligned after the sort above)
        ncid.createDimension('n', num_seg)
        # ncid.createDimension('hru', num_hru)
        # finished edit by MESH workflow

        # --- Variables (all written through the mizuroute helper)
        mizu.create_and_fill_nc_var(ncid, 'segId', 'int', 'n', False, shp_river[river_seg_id].values.astype(int), 'Unique ID of each stream segment', '-')
        mizu.create_and_fill_nc_var(ncid, 'downSegId', 'int', 'n', False, shp_river[river_down_seg_id].values.astype(int), 'ID of the downstream segment', '-')
        mizu.create_and_fill_nc_var(ncid, 'slope', 'f8', 'n', False, shp_river[river_slope].values.astype(float), 'Segment slope', '-')
        # added by MESH workflow
        mizu.create_and_fill_nc_var(ncid, 'lon', 'f8', 'n', False, \
                                    shp_basin['lon'].values.astype(float), \
                                    'longitude', '-')
        mizu.create_and_fill_nc_var(ncid, 'lat', 'f8', 'n', False, \
                                    shp_basin['lat'].values.astype(float), \
                                    'latitude', '-')
        # finished edit by MESH workflow
        mizu.create_and_fill_nc_var(ncid, 'length', 'f8', 'n', False, shp_river[river_length].values.astype(float), 'Segment length', 'm')
        mizu.create_and_fill_nc_var(ncid, 'hruId', 'int', 'n', False, shp_basin[basin_hru_id].values.astype(int), 'Unique hru ID', '-')
        mizu.create_and_fill_nc_var(ncid, 'hruToSegId', 'int', 'n', False, shp_basin[basin_hru_to_seg].values.astype(int), 'ID of the stream segment to which the HRU discharges', '-')
        mizu.create_and_fill_nc_var(ncid, 'area', 'f8', 'n', False, shp_basin[basin_hru_area].values.astype(float), 'HRU area', 'm^2')
        # added by MESH workflow: constant width/manning columns set above
        mizu.create_and_fill_nc_var(ncid, 'width', 'f8', 'n', False, \
                                    shp_river['width'].values.astype(float), \
                                    'width', 'm')
        mizu.create_and_fill_nc_var(ncid, 'manning', 'f8', 'n', False, \
                                    shp_river['manning'].values.astype(float), \
                                    'manning', '-')
        # finished edit by MESH workflow
+
+#%% Function reindex to extract drainage database variables
#%% Function reindex to extract drainage database variables
def reindex_topology_file(in_ddb: str):
    """reindex topology file to match MESH requirements

    MESH requires stream segment IDs to be ordered from highest to lowest
    segment by receiving order, from 1 to the total number of segments in
    the domain (NA). This information is passed to MESH from the
    "drainage database" (or basin information file), where the IDs
    of stream segments are defined in the "Rank" variable, and the
    receiving order is defined in the "Next" variable, which contains
    the ID of the segment that the current stream segment flows in to.

    :param in_ddb: topology netcdf file as created by :func:generate_mesh_topology
    :type in_ddb: str (file path)
    :return: the new ranks
    :rtype: list
    :return: topology xarray dataset extended with the new ranks
    :rtype: xarray dataset
    """
    #% reading the input DDB
    drainage_db = xr.open_dataset(in_ddb)
    # close the file handle; the dataset object keeps the data available
    drainage_db.close()

    # Count the number of outlets (outlets have downSegId == 0,
    # as set by generate_mesh_topology)
    outlets = np.where(drainage_db['downSegId'].values == 0)[0]

    # % Re-indexing seg_id and tosegment
    # Get the segment ID associated with the (first) outlet.
    first_index = drainage_db['segId'].values[outlets[0]]

    # Create a copy of the 'downSegId' field; entries are zeroed out as they
    # are processed, so the copy doubles as a "remaining work" marker.
    old_next = drainage_db['downSegId'].values.copy()

    ## Set the current 'Next' and 'Rank' values.
    # total number of values
    current_next = len(drainage_db['segId'])
    # total number of values less number of outlets
    # NOTE(review): current_rank is decremented in the loop below but never
    # read afterwards — appears to be leftover bookkeeping.
    current_rank = current_next - len(outlets)

    ## Create dummy arrays for new values.
    # size of 'segId''
    new_next = [0]*len(drainage_db['segId'])
    # empty list (to push values to): FIFO queue of segIds waiting to be processed
    next_rank = []
    # list to append positions of new 'rank', first element is position of outlet
    new_rank = [outlets[0]]

    # % Reorder seg_id and tosegment
    # Breadth-first walk upstream from the outlet: each segment draining into
    # 'first_index' receives the next (decreasing) 'Next' number and its array
    # position is appended to new_rank.
    while (first_index != -1):
        for i in range(len(old_next)):
            if (old_next[i] == first_index):
                # save rank of current 'next'
                next_rank.append(drainage_db['segId'].values[i])
                # assign next using new ranking
                new_next[i] = current_next
                # save the current position corresponding to the new 'rank'
                new_rank.append(i)
                current_rank -= 1
                # override input value to mark as completed
                old_next[i] = 0
                break
        if (len(next_rank) == 0):
            # no more IDs to process
            first_index = -1
        elif (not np.any(old_next == first_index)):
            # all upstream segments of the current id are processed:
            # take next rank by 'next' order
            first_index = next_rank[0]
            # drop that element from the list
            del next_rank[0]
            # deincrement the 'next' rank
            current_next -= 1

    # downstream-first order was collected; flip so ranks run upstream->downstream
    new_rank = np.flip(new_rank)

    # % reordering: apply the new order to every per-element variable
    for m in ['area', 'length', 'slope', 'lon', 'lat', 'hruId',
              'segId', 'hruToSegId', 'downSegId', 'width', 'manning']:
        drainage_db[m].values = drainage_db[m].values[new_rank]

    # Reorder the new 'Next'.
    new_next = np.array(new_next)[new_rank]

    # % check if channel slope values match the minimum threshold
    min_slope = 0.000001
    drainage_db['slope'].values[drainage_db['slope'].values < min_slope] = min_slope

    # % Adding the updated Rank and Next variables to the file
    drainage_db['Rank'] = (['n'], np.array(range(1, len(new_rank) + 1),
                          dtype = 'int32')) # ordered list from 1:NA
    drainage_db['Rank'].attrs.update(standard_name = 'Rank',
                          long_name = 'Element ID', units = '1', _FillValue = -1)

    drainage_db['Next'] = (['n'], new_next.astype('int32')) # reordered 'new_next'
    drainage_db['Next'].attrs.update(standard_name = 'Next',
                         long_name = 'Receiving ID', units = '1', _FillValue = -1)

    # % Adding missing attributes and renaming variables
    # Add 'axis' and missing attributes for the 'lat' variable.
    drainage_db['lat'].attrs['standard_name'] = 'latitude'
    drainage_db['lat'].attrs['units'] = 'degrees_north'
    drainage_db['lat'].attrs['axis'] = 'Y'

    # Add 'axis' and missing attributes for the 'lon' variable.
    drainage_db['lon'].attrs['standard_name'] = 'longitude'
    drainage_db['lon'].attrs['units'] = 'degrees_east'
    drainage_db['lon'].attrs['axis'] = 'X'

    # Add or overwrite 'grid_mapping' for each variable (except axes).
    for v in drainage_db.variables:
        if (drainage_db[v].attrs.get('axis') is None):
            drainage_db[v].attrs['grid_mapping'] = 'crs'

    # Add the 'crs' itself (if none found).
    if (drainage_db.variables.get('crs') is None):
        drainage_db['crs'] = ([], np.int32(1))
        drainage_db['crs'].attrs.update(grid_mapping_name = 'latitude_longitude', longitude_of_prime_meridian = 0.0, semi_major_axis = 6378137.0, inverse_flattening = 298.257223563)

    # Rename variables to the names MESH expects.
    for old, new in zip(['area', 'length', 'slope', 'manning'], ['GridArea', 'ChnlLength', 'ChnlSlope', 'R2N']):
        drainage_db = drainage_db.rename({old: new})

    # Rename the 'subbasin' dimension (from 'n').
    drainage_db = drainage_db.rename({'n': 'subbasin'})

    # % Specifying the NetCDF "featureType"
    # Add a 'time' axis with static values set to today (in this case, time is not actually treated as a dimension).
    drainage_db['time'] = (['subbasin'], np.zeros(len(new_rank)))
    drainage_db['time'].attrs.update(standard_name = 'time', units = ('days since %s 00:00:00' % date.today().strftime('%Y-%m-%d')), axis = 'T')

    # Set the 'coords' of the dataset to the new axes.
    drainage_db = drainage_db.set_coords(['time', 'lon', 'lat'])

    # Add (or overwrite) the 'featureType' to identify the 'point' dataset.
    drainage_db.attrs['featureType'] = 'point'

    return new_rank, drainage_db
+
def add_gru_fractions_to_drainage_db(drainage_db, gru_fractions, fraction_type: list):
    """add gru fraction variable and gru dimension to drainage db

    drainage database is generated using :func:generate_mesh_topology
    followed by :func:reindex_topology_file

    :param drainage_db: MESH drainage database as from :func:generate_mesh_topology
    :type drainage_db: xarray.core.dataset.Dataset
    :param gru_fractions: per-GRU class fractions, indexed by GRU id as *strings*
        (one column per class) — e.g. output of :func:gru_fraction_from_hru_counts
        with a stringified index
    :type gru_fractions: pandas.core.frame.DataFrame
    :param fraction_type: list of the names of the classes used in discretization
    :type fraction_type: list
    :return: MESH drainage database with GRU information
    :rtype: xarray.core.dataset.Dataset
    """
    # row order must follow the drainage database's hruId order
    hru_ids = list(drainage_db.hruId.values)
    # gru_fractions is keyed by string ids; a single .loc lookup with the full
    # key list replaces the previous per-row fill loop (which also shadowed
    # the builtin `id`)
    keys = [str(hru) for hru in hru_ids]
    frac_array = gru_fractions.loc[keys, :].to_numpy(dtype=float)

    drainage_db["GRU"] = (["subbasin", "gru"], frac_array)
    drainage_db['GRU'].attrs['standard_name'] = 'GRU'
    drainage_db['GRU'].attrs['long_name'] = 'Group Response Unit'
    drainage_db['GRU'].attrs['units'] = '-'
    drainage_db['GRU'].attrs['_FillValue'] = -1

    # class labels along the new 'gru' dimension
    drainage_db["LandUse"] = (["gru"], fraction_type)

    return drainage_db
+
def reindex_forcing_file(input_forcing, drainage_db, input_basin):
    """reindex forcing file according to rank in mesh drainage database

    In the final postprocessing part of the forcing dataset, it is required to
    reorder the forcing variables based on the remapped "Rank" IDs from the basin
    information file "drainage_database". Because EASYMORE remaps the forcing
    variables based on the MERIT Hydro catchment IDs (COMID), the order of
    forcing variables may not match the order of the "Rank" variable.
    Therefore, the fields in the remapped forcing files must be remapped to be
    compatible with the "drainage_database" file used for MESH.
    Three input data files are required for this process, the "drainage_databse",
    remapped forcing files, and the MERIT Hydro catchment shapefile used in the
    previous steps. The following section code block executes the reordering
    operation.

    :param input_forcing: basin averaged forcing generated with EASYMORE
    :type input_forcing: xarray.Dataset
    :param drainage_db: mesh drainage database
    :type drainage_db: xarray.Dataset
    :param input_basin: shapefile with the catchment IDs (COMID)
    :type input_basin: geopandas.GeoDataframe
    :return: reordered forcing file for MESH
    :rtype: xarray.Dataset
    """

    # set lon and lat as coordinates, not variables
    lon = input_forcing.variables['longitude'].values
    lat = input_forcing.variables['latitude'].values
    # set lon lat as coordinates so that these are not in the data variables list
    # lon and lat are reindexed first separately
    input_forcing = input_forcing.set_coords(['latitude','longitude'])
    data_variables = list(input_forcing.keys())


    # %% extract indices of forcing ids based on the drainage database
    # 'ind' maps drainage-database order -> position in the forcing/basin order
    n = len(drainage_db.hruId)
    ind = []
    hruid = drainage_db.variables['hruId']

    for i in range(n):
        fid = np.where(np.int32(input_basin['COMID'].values) == hruid[i].values)[0]
        ind = np.append(ind, fid)

    ind = np.int32(ind)

    # first reindex lat and lon coordinates
    lon_reind = lon[ind]
    lat_reind = lat[ind]

    # %% reorder input forcing
    # initialize with the first variable (note the [time, basin] -> [subbasin, time] transpose)
    forc_vec = xr.Dataset(
        {
            data_variables[0]: (["subbasin", "time"], input_forcing[data_variables[0]].values[:,ind].transpose()),
        },
        coords={
            "time": input_forcing['time'].values.copy(),
            "lon": (["subbasin"], lon_reind),
            "lat": (["subbasin"], lat_reind),
        }
    )
    # then repeat for all other variables
    # NOTE(review): the loop variable 'n' overwrites the hru count computed
    # above — harmless here since the count is no longer needed, but confusing.
    for n in data_variables[1::]:
        forc_vec[n] = (("subbasin", "time"), input_forcing[n].values[: , ind].transpose())
        forc_vec[n].coords["time"] = input_forcing['time'].values.copy()
        forc_vec[n].coords["lon"] = (["subbasin"], lon_reind)
        forc_vec[n].coords["lat"] = (["subbasin"], lat_reind)
        forc_vec[n].attrs["units"] = input_forcing[n].units
        forc_vec[n].attrs["grid_mapping"] = 'crs'
        forc_vec[n].encoding['coordinates'] = 'time lon lat'

    # %% update meta data attributes
    now = datetime.now()
    forc_vec.attrs['Conventions'] = 'CF-1.6'
    forc_vec.attrs['License'] = 'The data were written by CWARHM'
    forc_vec.attrs['history'] = 'Created ' + now.strftime('%Y/%m/%d %H:%M:%S')
    forc_vec.attrs['featureType'] = 'timeSeries'

    # editing lat attribute
    forc_vec['lat'].attrs['standard_name'] = 'latitude'
    forc_vec['lat'].attrs['units'] = 'degrees_north'
    forc_vec['lat'].attrs['axis'] = 'Y'

    # editing lon attribute
    forc_vec['lon'].attrs['standard_name'] = 'longitude'
    forc_vec['lon'].attrs['units'] = 'degrees_east'
    forc_vec['lon'].attrs['axis'] = 'X'

    # editing time attribute
    forc_vec['time'].attrs['standard_name'] = 'time'
    forc_vec['time'].attrs['axis'] = 'T'
    forc_vec['time'].encoding['calendar'] = 'gregorian'
    forc_vec.encoding.update(unlimited_dims = 'time')

    # coordinate system copied from the drainage database
    forc_vec['crs'] = drainage_db['crs'].copy()

    # Define a variable for the points and set the 'timeseries_id' (required for some viewers).
    forc_vec['subbasin'] = (['subbasin'], drainage_db['segId'].values.astype(np.int32).astype('S20'))
    forc_vec['subbasin'].attrs['cf_role'] = 'timeseries_id'

    return forc_vec
+
+
+class MeshClassIniFile():
+ """A python class to write the CLASS.ini file for the Land-Surface scheme "CLASS" in MESH
+
+ Attributes
+ ----------
+ filepath : str
+ path to write the ini file to
+ n_gru : int
+ number of GRU's
+ """
+ def __init__(self, filepath, n_GRU, pd_datetime_start,
+ title='Default set-up created with CWARHM', name="Bart van Osnabrugge",
+ place="University of Saskatchewan"
+ ):
+ """
+ :param filepath: path to write the ini file to
+ :type filepath: str
+ :param n_GRU: number of GRU's
+ :type n_GRU: int
+ """
+ self.filepath = filepath
+ self.n_gru = n_GRU
+ self.pd_datetime_start = pd_datetime_start
+ self.title = title
+ self.name = name
+ self.place = place
+
+ def set_header(self,title,name,place):
+ """sets first three comment lines with header information
+
+ :param title: title of the model run
+ :type title: str
+ :param name: name of the modeler
+ :type name: str
+ :param place: affiliation of modeler or modelling domain
+ :type place: str
+ """
+
+ line1 = "{:<70}".format(title)+'01 TITLE'
+ line2 = "{:<70}".format(name)+'02 NAME'
+ line3 = "{:<70}".format(place)+'03 PLACE'
+ self.header = line1+'\n'+line2+'\n'+line3+'\n'
+
+ def set_area_info(self,deglat=0.00,deglon=0.00,windspeed_ref_height=40.00,
+ temp_humid_ref_height=40.00, surface_roughness_height=50.00,
+ ground_cover_flag=-1, ILW=1, n_grid=0):
+ """sets line 4 DEGLAT/DEGLON/ZRFM/ZRFH/ZBLD/GC/ILW/NL/NM
+
+ :param deglat: Latitude of the sit or grid-cell in degrees, relevant
+ for grid version of MESH or site-specific only, otherwise indicative defaults to 0.00
+ :type deglat: float, optional
+ :param deglon: Longitude of the sit or grid-cell in degrees, , relevant
+ for grid version of MESH or site-specific only, otherwise indicativedefaults to 0.00
+ :type deglon: float, optional
+ :param windspeed_ref_height: Reference height (measurement height) for wind speed, defaults to 40.00
+ :type windspeed_ref_height: float, optional
+ :param temp_humid_ref_height: Reference height (measurement height) for temperature and humidity, defaults to 40.00
+ :type temp_humid_ref_height: float, optional
+ :param surface_roughness_height: Height into the atmosphere for aggregating surface roughness (usually in the order of 50-100 m), defaults to 50.00
+ :type surface_roughness_height: float, optional
+ :param ground_cover_flag: Ground cover flag; set to -1.0 if the GRUs in the file represent a "land surface", defaults to -1
+ :type ground_cover_flag: int, optional
+ :param ILW: Set to 1 (See the note on ILW below), defaults to 1
+ :type ILW: int, optional
+ :param n_grid: Number of grid-cells in the basin; this number must match the total number of grid-cells "TotalNumOfGrids" from the basin information file, defaults to 0
+ :type n_grid: int, optional
+ """
+ n_GRU = self.n_gru
+ line4 = '{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t'.format(
+ deglat,deglon,windspeed_ref_height,temp_humid_ref_height,
+ surface_roughness_height,ground_cover_flag,ILW,n_grid,n_GRU
+ ).expandtabs(4)
+ line4 = "{:<70}".format(line4)+'04 DEGLAT/DEGLON/ZRFM/ZRFH/ZBLD/GC/ILW/NL/NM'
+ self.area_info = line4
+
    def GRU_part_template(self):
        """Template of the text related to a single GRU

        Template values can be replaced by replacing values encapsulated with "_"
        """
        # Verbatim CLASS.ini fragment for one GRU (file lines 05-19).
        # Placeholder tokens have the form _NAME-#_ where # is the GRU index;
        # callers are expected to substitute them before writing the file.
        # NOTE(review): the template is only stored on the instance here — no
        # substitution routine is visible in this section of the file.
        self.GRU_template = \
        '''
 _FCAN-NL#_ _FCAN-BL#_ _FCAN-C#_ _FCAN-G#_ _FCAN-U#_ _LAMX-NL#_ _LAMX-BL#_ _LAMX-C#_ _LAMX-G#_ 05Land class type/fcanrow/pamxrow
 _LNZ0-NL#_ _LNZ0-BL#_ _LNZ0-C#_ _LNZ0-G#_ _LNZ0-U#_ _LAMN-NL#_ _LAMN-BL#_ _LAMN-C#_ _LAMN-G#_ 06lnz0row/pamnrow
 _ALVC-NL#_ _ALVC-BL#_ _ALVC-C#_ _ALVC-G#_ _ALVC-U#_ _CMAS-NL#_ _CMAS-BL#_ _CMAS-C#_ _CMAS-G#_ 07alvcrow/cmasrow
 _ALIC-NL#_ _ALIC-BL#_ _ALIC-C#_ _ALIC-G#_ _ALIC-U#_ _ROOT-NL#_ _ROOT-BL#_ _ROOT-C#_ _ROOT-G#_ 08alirow/rootrow
 _RSMN-NL#_ _RSMN-BL#_ _RSMN-C#_ _RSMN-G#_ _QA50-NL#_ _QA50-BL#_ _QA50-C#_ _QA50-G#_ 09rsmnrow/qa50row
 _VPDA-NL#_ _VPDA-BL#_ _VPDA-C#_ _VPDA-G#_ _VPDB-NL#_ _VPDB-BL#_ _VPDB-C#_ _VPDB-G#_ 10vpdarow/vpbprow
 _PSGA-NL#_ _PSGA-BL#_ _PSGA-C#_ _PSGA-G#_ _PSGB-NL#_ _PSGB-BL#_ _PSGB-C#_ _PSGB-G#_ 11psgarow/psgbrow
 _DRN-#_ _SDEP-#_ _FARE-#_ _DDEN-#_ 12drnrow/sdeprow/farerow/ddenrow
 _XSLOPE-#_ _GRKF-#_ _MANN-#_ _KSAT-#_ 1 13xslprow/grkfrow/manrow/WFCIROW/midrow
 _SAND1-#_ _SAND2-#_ _SAND3-#_ 14sand
 _CLAY1-#_ _CLAY2-#_ _CLAY3-#_ 15clay
 _ORGM1-#_ _ORGM2-#_ _ORGM3-#_ 16org
 _TBAR1-#_ _TBAR2-#_ _TBAR3-#_ _TCAN-#_ _TSNO-#_ _TPND-#_ 17temperature-soil/can/sno/pnd
 _THLQ1-#_ _THLQ2-#_ _THLQ3-#_ _THIC1-#_ _THIC2-#_ _THIC3-#_ _ZPND-#_ 18soil moisture-soil/ice/pnd
 _RCAN-#_ _SCAN-#_ _SNO-#_ _ALBS-#_ _RHOS-#_ _GRO-#_ 19rcan/scan/sno/albs/rho/gro
        '''
+
    def write_default_GRU_part(self):
        '''Return the default 15-record block (CLASS.ini records 05-19) for a
        single GRU as a string.

        Despite the name, nothing is written here: the caller
        (``write_ini_file``) concatenates one copy of this block per GRU.
        The trailing comment on each record names its variables and, where
        given, the Fortran read format.
        '''
        GRU_default_block = \
        '''
0.00 0.00 0.00 0.00 1.00 0.00 0.00 0.00 0.00 05 5xFCAN/4xLAMX
0.00 0.00 0.00 0.00 0.30 0.00 0.00 0.00 0.00 06 5xLNZ0/4xLAMN
0.00 0.00 0.00 0.00 0.09 0.00 0.00 0.00 0.00 07 5xALVC/4xCMAS
0.00 0.00 0.00 0.00 0.15 0.00 0.00 0.00 0.00 08 5xALIC/4xROOT
0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 09 4xRSMN/4xQA50
0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 10 4xVPDA/4xVPDB
0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 11 4xPSGA/4xPSGB
0.00 1.91 1.00 7.41 12 DRN/SDEP/FARE/DD
0.106 0.57 0.015 3.9E-05 1 Urban 13 XSLP/XDRAINH/MANN/KSAT/MID | 4F8.1, I8
23 25 33 14 3xSAND | 3F10.1
11 12 15 15 3xCLAY | 3F10.1
0.00 0.00 0.00 16 3xORGM | 3F10.1
5.00 5.00 5.00 0.00 0.00 0.00 17 3xTBAR/TCAN/TSNO/TPND | 6F10.2
0.20 0.20 0.20 0.00 0.00 0.00 0.00 18 3xTHLQ/3xTHIC/ZPND | 7F10.3
0.00 0.00 0.00 0.00 0.00 0.00 19 RCAN/SCAN/SNO/ALBS/RHOS/GRO | 2F10.4,F10.2,F10.3,F10.4,F10.3
 '''
        return GRU_default_block
+
+ def set_start_end_times(self,pd_datetime_start):
+ """Write dates to control CLASS point outputs and the start date
+
+ :param pd_datetime_start: datetime of the first time value in forcing file
+ :type pd_datetime_start: pandas.Timestamp
+ """
+ line_fill = "1\t1\t1\t1\t".expandtabs(10)
+ line20 = "{:<70}".format(line_fill)+'20 (not used, but 4x integer values are required)'
+ line21 = "{:<70}".format(line_fill)+'21 (not used, but 4x integer values are required)'
+
+ iyear = pd_datetime_start.year
+ ijday = pd_datetime_start.dayofyear
+ iminutes = pd_datetime_start.minute
+ ihour = pd_datetime_start.hour
+
+ start_time_line = "{}\t{}\t{}\t{}\t".format(ihour,iminutes,ijday,iyear).expandtabs(10)
+ start_time_line = "{:<70}".format(start_time_line)+'22 IHOUR/IMINS/IJDAY/IYEAR'
+ self.start_end_times = line20+'\n'+line21+'\n'+start_time_line
+
+
    def build_default_ini_file(self):
        """set_header, area_info and start_end_times with default values

        Convenience wrapper: runs :meth:`set_header`, :meth:`set_area_info`
        and :meth:`set_start_end_times` using values already stored on the
        instance (``title``, ``name``, ``place``, ``pd_datetime_start``),
        populating the text attributes that :meth:`write_ini_file` writes out.
        """
        self.set_header(self.title,self.name,self.place)
        self.set_area_info()
        self.set_start_end_times(self.pd_datetime_start)
+
+
+ def write_ini_file(self):
+ """write ini text to file, with n_gru default GRUs
+ """
+ with open(self.filepath,'w') as inif:
+ inif.write(self.header)
+ inif.write(self.area_info)
+ for n in range(self.n_gru):
+ inif.write(self.write_default_GRU_part())
+ inif.write(self.start_end_times)
+
class MeshRunOptionsIniFile():
    """Class to edit MESH run options and write the run-options ini file.

    The file content is produced by replacing ``$FLAG$`` tokens in a text
    template (:meth:`get_template`) with the values held in ``self.flags``.
    Defaults come from :meth:`set_default_flags`; forcing-related flags can
    be derived from a netCDF forcing file with :meth:`set_flags_from_ff`.
    """
    def __init__(self, inifilepath, forcing_file=None) -> None:
        """
        Parameters
        ----------
        inifilepath : str
            path to write file to
        forcing_file : str, optional
            path to forcing file. if set, flags are set to match , by default None
        """
        self.inifilepath = inifilepath
        self.template = self.get_template()
        self.flags = self.set_default_flags()
        if forcing_file:
            print('forcing setting parsed from forcing file')
            self.set_flags_from_ff(forcing_file)
        else:
            print('forcing flags are set as default and need to be set manually')
        # the file is written on construction; change_flag() rewrites it
        self.write_ini_file()


    def get_template(self):
        """Return the run-options template with ``$...$`` placeholder tokens."""
        # NOTE(review): BASINFORCINGFLAG reuses the $SHDFILEFLAG$ token (both
        # default to 'nc_subbasin') -- confirm this sharing is intended.
        template = \
'''MESH input run options file
##### Control Flags #####
----#
 $NOCF$ # Number of control flags
SHDFILEFLAG $SHDFILEFLAG$
BASINFORCINGFLAG $SHDFILEFLAG$ start_date=$STARTDATE$ hf=$HF$ time_shift=$TIMESHIFT$ fname=$FNAME$
BASINSHORTWAVEFLAG name_var=$BASINSHORTWAVEFLAG$
BASINHUMIDITYFLAG name_var=$BASINHUMIDITYFLAG$
BASINRAINFLAG name_var=$BASINRAINFLAG$
BASINPRESFLAG name_var=$BASINPRESFLAG$
BASINLONGWAVEFLAG name_var=$BASINLONGWAVEFLAG$
BASINWINDFLAG name_var=$BASINWINDFLAG$
BASINTEMPERATUREFLAG name_var=$BASINTEMPERATUREFLAG$
TIMESTEPFLAG $TIMESTEPFLAG$
INPUTPARAMSFORMFLAG $INPUTPARAMSFORMFLAG$
IDISP $IDISP$ #02 Vegetation Displacement Height Calculation | A20, I4
IZREF $IZREF$ #03 Atmospheric Model Reference Height | A20, I4
IPCP $IPCP$ #04 Rainfall-Snowfall Partition distribution | A20, I4
IWF $IWF$ #08 Water Flow control | A20, I4
FROZENSOILINFILFLAG $FROZENSOILINFILFLAG$ #22 frozen soil infiltration flag | A20, I4
SAVERESUMEFLAG $SAVERESUMEFLAG$
RESUMEFLAG $RESUMEFLAG$
INTERPOLATIONFLAG $INTERPOLATIONFLAG$
SOILINIFLAG $SOILINIFLAG$
PBSMFLAG $PBSMFLAG$
BASEFLOWFLAG $BASEFLOWFLAG$
RUNMODE $RUNMODE$
BASINBALANCEOUTFLAG $BASINBALANCEOUTFLAG$
BASINAVGWBFILEFLAG $BASINAVGWBFILEFLAG$
BASINAVGEBFILEFLAG $BASINAVGEBFILEFLAG$
DIAGNOSEMODE $DIAGNOSEMODE$
PRINTSIMSTATUS $PRINTSIMSTATUS$
OUTFILESFLAG $OUTFILESFLAG$
AUTOCALIBRATIONFLAG $AUTOCALIBRATIONFLAG$
METRICSSPINUP $METRICSSPINUP$
##### Output Grid selection #####
----#
 $NOGP$ #Maximum 5 points #17 Number of output grid points
---------#---------#---------#---------#---------#
 $GRIDNUMBOUT$ #19 Grid number
 $GRUOUT$ #20 GRU (if applicable)
$CLASSOUT$ #21 Output directory
##### Output Directory #####
---------#
$OUTPUTDIR$ #24 Output Directory for total-basin files
##### Simulation Run Times #####
---#---#---#---#
$STARTYEAR$ $STARTDAY$ $STARTHOUR$ $STARTMINUTE$ #27 Start year, day, hour, minute 2000 279
$STOPYEAR$ $STOPDAY$ $STOPHOUR$ $STOPMINUTE$ #28 Stop year, day, hour, minute 2000 288
'''
        return template

    def set_default_flags(self):
        '''Create dictionary with the default value for every template flag.'''
        default_flags = dict()
        default_flags['NOCF'] = '31'
        default_flags['SHDFILEFLAG'] = 'nc_subbasin'
        default_flags['STARTDATE'] = '20001001'
        default_flags['HF'] = 60
        default_flags['TIMESHIFT'] = 0
        default_flags['FNAME'] = 'MESH_input'
        default_flags['BASINSHORTWAVEFLAG'] = 'FB'
        default_flags['BASINHUMIDITYFLAG'] = 'HU'
        default_flags['BASINRAINFLAG'] = 'PR'
        default_flags['BASINPRESFLAG'] = 'P0'
        default_flags['BASINLONGWAVEFLAG'] = 'FI'
        default_flags['BASINWINDFLAG'] = 'UV'
        default_flags['BASINTEMPERATUREFLAG'] = 'TT'
        default_flags['TIMESTEPFLAG'] = 30
        default_flags['INPUTPARAMSFORMFLAG'] = 'txt'
        default_flags['IDISP'] = 0
        default_flags['IZREF'] = 1
        default_flags['IPCP'] = 1
        default_flags['IWF'] = 1
        default_flags['FROZENSOILINFILFLAG'] = 1
        default_flags['SAVERESUMEFLAG'] = 0
        default_flags['RESUMEFLAG'] = 0
        default_flags['INTERPOLATIONFLAG'] = 1
        default_flags['SOILINIFLAG'] = 1
        default_flags['PBSMFLAG'] = 1
        default_flags['BASEFLOWFLAG'] = 'wf_lzs'
        default_flags['RUNMODE'] = 'runrte'
        default_flags['BASINBALANCEOUTFLAG'] = 'none'
        default_flags['BASINAVGWBFILEFLAG'] = 'daily'
        default_flags['BASINAVGEBFILEFLAG'] = 'daily'
        default_flags['DIAGNOSEMODE'] = 'on'
        default_flags['PRINTSIMSTATUS'] = 'date_monthly'
        default_flags['OUTFILESFLAG'] = 'off'
        default_flags['AUTOCALIBRATIONFLAG'] = 1
        default_flags['METRICSSPINUP'] = 366
        default_flags['NOGP'] = 0
        default_flags['GRIDNUMBOUT'] = 1936
        default_flags['GRUOUT'] = 1
        default_flags['CLASSOUT'] = 'CLASSOUT'
        default_flags['OUTPUTDIR'] = 'output'
        default_flags['STARTYEAR'] = 2000
        default_flags['STARTDAY'] = 275
        default_flags['STARTHOUR'] = 0
        default_flags['STARTMINUTE'] = 0
        default_flags['STOPYEAR'] = 2018
        default_flags['STOPDAY'] = '001'
        default_flags['STOPHOUR'] = 0
        default_flags['STOPMINUTE'] = 0
        return default_flags

    def set_flags_from_ff(self,forcing_file):
        """Derive forcing-related flags from a netCDF forcing file.

        Reads the time axis to set the start/stop date flags, the forcing
        time step (HF) and the FNAME flag (forcing file basename without
        extension).

        :param forcing_file: path to the netCDF forcing file
        :type forcing_file: str
        """
        flags = self.flags
        # open forcing file; use a context manager so the dataset is closed
        # again after the time axis has been read
        with xr.open_dataset(forcing_file) as ds:
            datetimestart = pd.Timestamp(ds.time.values[0])
            datetimeend = pd.Timestamp(ds.time.values[-1])
            timestep_delta = ds.time.values[1]-ds.time.values[0]
        # numpy timedelta64 -> whole minutes
        timestep_minutes = timestep_delta.astype('timedelta64[m]').astype('int')

        # start stop flags
        flags['STARTDATE'] = datetimestart.strftime('%Y%m%d')
        flags['HF'] = timestep_minutes
        flags['STARTYEAR'] = datetimestart.year
        flags['STARTDAY'] = "{:02d}".format(datetimestart.day_of_year)
        flags['STARTHOUR'] = datetimestart.hour
        flags['STARTMINUTE'] = datetimestart.minute
        flags['STOPYEAR'] = datetimeend.year
        flags['STOPDAY'] = "{:02d}".format(datetimeend.day_of_year)
        flags['STOPHOUR'] = datetimeend.hour
        flags['STOPMINUTE'] = datetimeend.minute
        # default time step is 30 min, only adjust this when
        # forcing time step is smaller than 30 min
        if timestep_minutes < flags['TIMESTEPFLAG']:
            flags['TIMESTEPFLAG'] = timestep_minutes

        # name flag from filename, without extension
        flags['FNAME'] = ntpath.basename(forcing_file).split('.')[0]

        self.flags = flags

    def change_flag(self,flag,flag_value):
        '''Replace one flag value and rewrite the ini file.'''
        flags = self.flags
        flags[flag] = flag_value
        self.write_ini_file()

    def parse_flag_values(self):
        '''Replace all ``$KEY$`` tags in the template with flag values.'''
        text = self.template
        for key, value in self.flags.items():
            text = text.replace('$'+str(key)+'$',str(value))
        return text

    def write_ini_file(self):
        '''Parse flag values and write the result to ``self.inifilepath``.'''
        text = self.parse_flag_values()
        with open(self.inifilepath,'w') as setf:
            setf.write(text)
+
class MeshHydrologyIniFile():
    """Class to build and write the MESH hydrology parameters ini file.

    The file has three parameter sections -- channel routing,
    GRU-independent and GRU-dependent hydrologic parameters -- that are
    substituted into a text template. Each section can be supplied by the
    caller or filled with defaults.
    """
    def __init__(self, inifilepath, n_gru, mesh_setting_flags=None,
                 routing_parameters=None, gru_independent_parameters=None,
                 gru_hydrologic_parameters=None) -> None:
        """
        :param inifilepath: path to write the file to
        :type inifilepath: str
        :param n_gru: number of GRUs (columns of the GRU-dependent table)
        :type n_gru: int
        :param mesh_setting_flags: option flags (not implemented yet), defaults to None
        :param routing_parameters: channel routing parameter table, defaults to None
        :type routing_parameters: pandas.DataFrame, optional
        :param gru_independent_parameters: GRU-independent parameters, defaults to None
        :type gru_independent_parameters: dict, optional
        :param gru_hydrologic_parameters: GRU-dependent parameter table, defaults to None
        :type gru_hydrologic_parameters: pandas.DataFrame, optional
        """
        self.inifilepath = inifilepath
        self.template = self.get_template()
        self.flags = self.set_default_flags()
        if mesh_setting_flags is not None:
            print('NOT IMPLEMENTED YET Hydrology ini file parsed with settings')
        else:
            print('Hydrology ini is set as default')
        # compare against None explicitly: `if dataframe:` raises ValueError
        # because the truth value of a pandas DataFrame is ambiguous
        if routing_parameters is not None:
            print('Set routing parameters from input')
            self.routing_parameters = routing_parameters
        else:
            self.routing_parameters = self.set_default_routing_parameters()
            print('Routing parameters set as default r2n, r1n, flz, pwr (5 identical classes)')
        if gru_independent_parameters is not None:
            self.gru_independent_parameters = gru_independent_parameters
            print("set gru independent parameters from input")
        else:
            self.gru_independent_parameters = self.set_default_gru_independent_parameters()
        if gru_hydrologic_parameters is not None:
            self.gru_hydrologic_parameters = gru_hydrologic_parameters
            print('set gru dependent parameters from input')
        else:
            self.gru_hydrologic_parameters = self.set_default_gru_dependent_parameters(n_gru)
            print('default hydrologic gru dependent parameters')

        self.write_ini_file()

    def get_template(self):
        """Return the hydrology-ini template; ``$...$`` and ``$$$...$$$``
        tokens are replaced by :meth:`parse_setup`."""
        template = \
'''2.0: MESH Hydrology Parameters (v2.0)
!> Lines that begin with '!' are skipped as comments.
!> All variable lines have the same free-format space delimited structure:
!> [Variable name] [Value]
##### Option Flags #####
----#
 $NOCF$ # Number of option flags.
####### Channel routing parameters #####
-------#
 $NOCRPS$ # Number of channel routing parameters.
$$$CHANNEL_ROUTING_PART$$$
##### GRU class independent hydrologic parameters #####
-------#
 $NOGRUIPS$ # Number of GRU independent hydrologic parameters
$$$GRU_INDEPENDENT_PART$$$
##### GRU class dependent hydrologic parameters #####
-------#
 $NOGRUHPS$ # Number of GRU dependent hydrologic parameters.
$$$GRU_DEPENDENT_PART$$$
'''
        return template

    def set_default_flags(self):
        '''Create dictionary with default section-size flags.'''
        default_flags = dict()
        default_flags['NOCF'] = 0
        default_flags['NOCRPS'] = 4
        default_flags['NOGRUIPS'] = 5
        default_flags['NOGRUHPS'] = 13
        return default_flags

    def set_default_routing_parameters(self):
        """Default routing table: r2n/r1n/flz/pwr for 5 identical classes."""
        default_routing_parameters = pd.DataFrame(
            data = np.array(
                [[0.035, 0.10, 1.0E-04, 2.00] for i in range(5)]
            ).transpose(),
            index=['r2n','r1n','flz','pwr'],
            columns=[str(n+1) for n in range(5)]
        )
        return default_routing_parameters

    def parse_routing_parameters(self):
        """Format the routing table as text with a commented header line.

        NOTE: legacy helper -- :meth:`parse_setup` now uses
        :meth:`parse_parameter_dataframe` instead; kept for compatibility.
        """
        routing_parameters_text = self.routing_parameters.__repr__()
        # make the header line a comment
        all_lines = routing_parameters_text.split('\n')
        line1 = all_lines[0]
        line1_com = '!>\t'.expandtabs()+line1.lstrip()
        all_lines[0] = line1_com
        text_routing_parameters = '\n'.join(all_lines)
        return text_routing_parameters

    def set_default_gru_independent_parameters(self):
        """Default GRU-independent parameters (dict, T0_ACC is a per-year list)."""
        dgip = dict()
        dgip['SOIL_POR_MAX'] = 0.8
        dgip['SOIL_DEPTH'] = 4.1
        dgip['S0'] = 1.0
        dgip['T_ICE_LENS'] = -10.0
        dgip['T0_ACC'] = [0]*30
        return dgip

    def parse_gru_independent_parameters(self):
        """Format the GRU-independent parameters as text.

        Scalar entries become ``KEY<tab>value`` lines; a 'T0_ACC' entry
        (per-year list) is rendered last as a commented year-number header
        plus a tab-separated value line. Previously this method raised
        NameError when 'T0_ACC' was absent; it is now optional.
        """
        dgip = self.gru_independent_parameters
        lines = []
        T0_ACC_part = None
        for key,value in dgip.items():
            if key != 'T0_ACC':
                lines.append(key+'\t'+str(value))
            else:
                T0_ACC_header = '!> YEAR\t'+'\t'.join([str(i+1) for i in range(len(value))])+'\n'
                T0_ACC_values = 't0_ACC\t'+'\t'.join([str(a) for a in value])
                T0_ACC_part = T0_ACC_header+T0_ACC_values
        # join all parts and ensure the T0_ACC part (if any) is last
        text = '\n'.join(lines)
        if T0_ACC_part is not None:
            text = text+'\n'+T0_ACC_part
        return text

    def set_default_gru_dependent_parameters(self,n_gru):
        """Default GRU-dependent table: one identical column per GRU."""
        default_grudep = pd.DataFrame(
            data = np.array(
                [[0,0.1,0.1,0.1,300.0,6.0,1.0,0.5,0.0,2.1,0.0,0.0,0.0] for i in range(n_gru)]
            ).transpose(),
            index=['IWF','ZSNL','ZPLS','ZPLG','fetch','Ht','N_S','A_S',
                   'Distrib','FRZC','FREZTH','SWELIM','SNDENLIM'],
            columns=[str(n+1) for n in range(n_gru)]
        )
        return default_grudep

    def parse_parameter_dataframe(self,dataframe,header=''):
        """Format a parameter DataFrame as text with a commented header line.

        :param dataframe: parameter table (parameters as rows, classes as columns)
        :param header: label placed in the commented header line, defaults to ''
        """
        text = dataframe.to_string()
        # make the header line a comment
        all_lines = text.split('\n')
        line1 = all_lines[0]
        line1_com = '!>{}\t'.format(header).expandtabs(12)+line1.lstrip()
        all_lines[0] = line1_com
        all_lines_expanded = [line.expandtabs(12) for line in all_lines]
        text_parameters = '\n'.join(all_lines_expanded)
        return text_parameters

    def parse_setup(self):
        '''Replace all flag tags and section blocks in the template.'''
        text = self.template
        # first replace all flags
        for key, value in self.flags.items():
            text = text.replace('$'+str(key)+'$',str(value))
        # then fill in the $$$...$$$ section blocks
        text = text.replace('$$$CHANNEL_ROUTING_PART$$$',self.parse_parameter_dataframe(self.routing_parameters,'NRVR')+'\n')
        text = text.replace('$$$GRU_INDEPENDENT_PART$$$',self.parse_gru_independent_parameters()+'\n')
        text = text.replace('$$$GRU_DEPENDENT_PART$$$',self.parse_parameter_dataframe(self.gru_hydrologic_parameters,'GRU')+'\n')
        return text

    def write_ini_file(self):
        '''Parse the setup and write it to ``self.inifilepath``.'''
        text = self.parse_setup()
        with open(self.inifilepath,'w') as setf:
            setf.write(text)
+
class MeshReservoirTxtFile():
    '''Writer for the reservoir input file of Standalone MESH.

    The file describes the controlled and natural reservoirs (or lakes) in
    the basin. Controlled reservoirs replace modelled streamflow with the
    values from this file; natural reservoirs/lakes use a power curve to
    regulate release at an outlet (a polynomial release curve exists in
    test versions of the model).

    Standalone MESH requires this file even for watersheds without any
    reservoirs: a dummy first line with the reservoir count set to zero,
    following the normal formatting rules, is then sufficient.

    TODO: currently only gives the dummy file
    '''
    def __init__(self,inifilepath) -> None:
        self.inifilepath = inifilepath
        self.template = self.get_template()
        self.write_ini_file()

    def get_template(self):
        # dummy record: first number is the reservoir count (zero)
        return '0 0 0'

    def parse_setup(self):
        '''No tags to substitute; return the template unchanged.'''
        return self.template

    def write_ini_file(self):
        '''Write the (dummy) reservoir record to ``self.inifilepath``.'''
        with open(self.inifilepath, 'w') as out_file:
            out_file.write(self.parse_setup())
+
class MeshSoilLevelTxtFile():
    '''MESH_input_soil_levels.txt describes the depth in meters of connected
    soil layers in the soil profile.

    It is similar to the Soil_3lev file used by the CLASS
    Stand-Alone Driver. The first layer is the surface layer, which can be no
    less than 10 cm in depth. A minimum of three soil layers are required
    in the file.
    '''
    def __init__(self,inifilepath,soil_layers=None) -> None:
        """
        :param inifilepath: path to write the file to
        :type inifilepath: str
        :param soil_layers: table with 'DELZ' (layer thickness) and 'ZBOT'
            (layer bottom depth) columns, one row per layer; defaults to None
            (three default layers)
        :type soil_layers: pandas.DataFrame, optional
        """
        self.inifilepath = inifilepath
        self.template = self.get_template()
        self.flags = self.set_default_flags()
        # compare against None explicitly: `if soil_layers:` raises
        # ValueError for a DataFrame (ambiguous truth value)
        if soil_layers is not None:
            self.soil_layers = soil_layers
            print("setting soil layers from soil layers input")
        else:
            self.soil_layers = self.set_default_soil_layers()
            print("setting three default soil layers")
        self.write_ini_file()

    def get_template(self):
        """Return the (empty) template; content is built in parse_setup."""
        template = ''''''
        return template

    def set_default_flags(self):
        """This file has no header flags; return None for interface parity
        with the other ini-file classes."""
        return None

    def set_default_soil_layers(self):
        """Three default layers: DELZ (thickness, m) / ZBOT (bottom depth, m)."""
        default_soil_layers = pd.DataFrame(
            data = np.array(
                [[0.10, 0.25, 3.75],[0.10, 0.35, 4.10]]
            ).transpose(),
            columns=['DELZ','ZBOT']
        )
        return default_soil_layers


    def parse_setup(self):
        '''Format one "DELZ ZBOT  #DELZ/ZBOT" line per soil layer.'''
        lines = self.soil_layers.to_string(header=False,index=False,col_space=8).split('\n')
        formatted_lines = ['{}\t#DELZ/ZBOT'.format(line) for line in lines]
        text = '\n'.join(formatted_lines)
        return text

    def write_ini_file(self):
        '''Parse the soil-layer text and write it to ``self.inifilepath``.'''
        text = self.parse_setup()
        with open(self.inifilepath,'w') as setf:
            setf.write(text)
+
class MeshInputStreamflowTxtFile():
    '''This file contains measured streamflow values for gauged locations

    It can also be used to specify locations where streamflow values will
    be output even if no gauge actually exists at that location.


    At a minimum, every configuration of Standalone MESH must contain at least one
    streamflow gauge location. Even if the watershed does not contain an actual
    streamflow gauge, one must still be included in the streamflow file. It is also
    important that at least one gauge in the file has a measured value greater than
    zero during the first time step. This value is used to initialize the flow in
    the stream network. This value must be greater than zero and must be included
    even if the watershed contains no actual gauges with measured data.

    For more information see https://wiki.usask.ca/display/MESH/MESH_input_streamflow.txt
    '''
    def __init__(self,inifilepath,streamflow=None,forcing_file=None) -> None:
        """
        :param inifilepath: path to write the file to
        :type inifilepath: str
        :param streamflow: gauge records with a 'streamflow' variable over
            ('gauge', 'time') dimensions and 'id'/'lat'/'lon' coordinates,
            defaults to None
        :type streamflow: xarray.Dataset, optional
        :param forcing_file: path to (or opened dataset of) the forcing file;
            if given without ``streamflow``, dummy records matching the
            forcing length are generated, defaults to None
        """
        self.inifilepath = inifilepath
        self.template = self.get_template()
        self.flags = self.set_default_flags()
        # compare against None explicitly: the truthiness of an (empty)
        # xarray.Dataset is not a reliable presence check
        if streamflow is not None:
            self.streamflow = streamflow
            print("setting streamflow from streamflow input")
            self.set_flags_from_streamflow()
            print("setting header flags from streamflow input")
        else:
            if forcing_file is not None:
                self.streamflow = self.match_streamflow_with_forcing(forcing_file)
                self.set_flags_from_streamflow()
                print("setting default dummy streamflow that matches forcing length")
            else:
                self.streamflow = self.set_default_streamflow()
                self.set_flags_from_streamflow() # to make sure compatibility
                print("no input, default streamflow set - manual intervention needed")

        self.write_ini_file()

    def get_template(self):
        '''The first line of the file is a comment line. Immediately following
        this line, in the second line in the file, is the header information.
        The header contains the number of streamflow gauge locations, the
        starting date of the record (for all gauges), and the uniform
        time-stepping of the records. It should also contain the number of
        records in the file, but this value is not used. The simulation will run
        until it reaches the user-specified stopping date, runs out of
        meteorological input data, or runs out of streamflow records to
        read from this file.'''
        template = \
'''# MESH streamflow record input file 01
$WF_NO$ $WF_NL$ $WF_MHRD$ $WF_KT$ $WF_START_YEAR$ $WF_START_DAY$ $WF_START_HOUR$ 02 WF_NO/WF_NL/WF_MHRD/WF_KT/WF_START_YEAR/WF_START_DAY/WF_START_HOUR'''
        return template

    def set_default_flags(self):
        '''Default header flags; normally overwritten by
        :meth:`set_flags_from_streamflow`.'''
        default_flag = dict()
        default_flag['WF_NO'] = 1
        default_flag['WF_NL'] = 0 #obsolete
        default_flag['WF_MHRD'] = 0 #obsolete
        default_flag['WF_KT'] = 24
        default_flag['WF_START_YEAR'] = 2000
        default_flag['WF_START_DAY'] = 1
        default_flag['WF_START_HOUR'] = 1
        return default_flag

    def set_default_streamflow(self):
        """Build a single dummy gauge with ten daily records of value 10."""
        streamflow_xr = xr.Dataset(
            {
                'streamflow': (["gauge","time"],np.array([[10]*10]))
            },
            coords={
                "time":pd.date_range(start='2000-01-01',periods=10,freq='D'),
                "id": (["gauge"], ['default_gauge']),
                "lon": (["gauge"], [0.00]),
                "lat": (["gauge"], [0.00]),
            },
        )
        # update metadata
        streamflow_xr.attrs['Conventions'] = 'CF-1.6'
        streamflow_xr.attrs['history'] = 'Example default input for MeshStreamflowTxt()'
        streamflow_xr.attrs['featureType'] = 'timeSeries'
        return streamflow_xr

    def set_flags_from_streamflow(self):
        """Derive the header flags (gauge count, time step, start date)
        from ``self.streamflow``."""
        flags = self.flags
        sf = self.streamflow

        # calculate timestep (numpy timedelta64 -> whole hours)
        datetimestart = pd.Timestamp(sf.time.values[0])
        timestep_delta = sf.time.values[1]-sf.time.values[0]
        timestep_hr = timestep_delta.astype('timedelta64[h]').astype('int')
        # set flags
        flags['WF_NO'] = sf.dims['gauge']
        flags['WF_KT'] = timestep_hr
        flags['WF_START_YEAR'] = datetimestart.year
        flags['WF_START_DAY'] = datetimestart.day_of_year
        flags['WF_START_HOUR'] = datetimestart.hour
        self.flags = flags

    def match_streamflow_with_forcing(self,forcing_dataset):
        """Build a dummy single-gauge record spanning the forcing time axis.

        :param forcing_dataset: opened forcing dataset or path to a forcing
            netCDF file (``__init__`` passes its ``forcing_file`` argument
            here, so plain paths must be accepted too)
        """
        if isinstance(forcing_dataset, str):
            # a path was given: open it first (previously this raised
            # AttributeError on `.time` below)
            forcing_dataset = xr.open_dataset(forcing_dataset)
        fds = forcing_dataset
        empty_streamflow_array = np.array([[-1]*len(fds.time)])
        # first record must be > 0: it initializes flow in the stream network
        empty_streamflow_array[0,0] = 10
        streamflow_xr = xr.Dataset(
            {
                'streamflow': (["gauge","time"],empty_streamflow_array)
            },
            coords={
                "time": fds.time,
                "id": (["gauge"], ['default_gauge']),
                "lon": (["gauge"], [0.00]),
                "lat": (["gauge"], [0.00]),
            },
        )
        # update metadata
        streamflow_xr.attrs['Conventions'] = 'CF-1.6'
        streamflow_xr.attrs['history'] = 'Example default input for MeshStreamflowTxt()'
        streamflow_xr.attrs['featureType'] = 'timeSeries'
        return streamflow_xr


    def parse_setup(self):
        '''Fill the header flags and append per-gauge location lines and the
        streamflow record table.'''
        text_header = self.template # this is a header only
        # first set header flags
        for key, value in self.flags.items():
            text_header = text_header.replace('$'+str(key)+'$',str(value))
        # next write gauge header lines from streamflow info
        gauge_headers = []
        for g in range(self.streamflow.dims['gauge']):
            gauge_info = self.streamflow.isel(gauge=g)
            wf_iy = str(gauge_info.lat.values)
            wf_jx = str(gauge_info.lon.values)
            wf_gage = str(gauge_info.id.values)
            # gauge name is truncated to 12 characters
            line = "{}\t{}\t{:<12}".format(wf_iy,wf_jx,wf_gage[:12])
            gauge_headers.append(line)
        gauge_header = '\n'.join(gauge_headers)
        # last parse streamflow data (one column per gauge, one row per step)
        streamflow_values = []
        for g in range(self.streamflow.dims['gauge']):
            gauge_info = self.streamflow.isel(gauge=g)
            streamflow_values.append(gauge_info.streamflow.values)
        dfs = pd.DataFrame(np.array(streamflow_values).transpose())
        stream_values_text = dfs.to_string(header=False,index=False)

        text = '\n'.join([text_header,gauge_header,stream_values_text])
        return text

    def write_ini_file(self):
        '''Parse the setup and write it to ``self.inifilepath``.'''
        text = self.parse_setup()
        with open(self.inifilepath,'w') as setf:
            setf.write(text)
+
+class MeshMinMaxParameterTxtFile():
+ '''This file is recently included to check if parameter values lie within specified ranges
+ so as to avoid model crash problems that will be caused by unrealistic parameter values.
+
+ This file is currently static
+ '''
+ def __init__(self,inifilepath) -> None:
+ """Initialize file
+
+ Parameters
+ ----------
+ inifilepath : str
+ path to file to write
+ """
+ self.inifilepath = inifilepath
+ self.template = self.get_template()
+ self.write_ini_file()
+
+ def get_template(self):
+ template = \
+'''Reserved 1 - THEXTRA !ROW 1
+0.0000 !min
+0.0001 !max
+Reserved 2 - ICE_INDEX !ROW 2
+0.0000 !min
+0.0001 !max
+Reserved 3 - GWSCALE !ROW 3
+0.0000 !min
+0.0001 !max
+River roughness factor (WF_R2) (5 classes maximum) !ROW 4
+0.0200 !min
+2.0000 !max
+WF_R2 - CLASS 2 !ROW 5
+0.0200 !min
+2.0000 !max
+WF_R2 - CLASS 3 !ROW 6
+0.0200 !min
+2.0000 !max
+WF_R2 - CLASS 4 !ROW 7
+0.0200 !min
+2.0000 !max
+WF_R2 - CLASS 5 !ROW 8
+0.0200 !min
+2.0000 !max
+maximum soil porosity !ROW 9
+0.0000 !min
+1.0000 !max
+depth from surface to bottom of rooting zone for maximum water holding capacity, m !ROW 10
+0.0000 !min
+4.1000 !max
+Surface saturations [0.75 - 1.0] !ROW 11
+0.0000 !min
+1.0000 !max
+Overnight minimum to cause ice lens after major melt -[50 - 0.0 �C] !ROW 12
+-50.00 !min
+0.0000 !max
+DRNROW - DRAINAGE INDEX, CALCULATED DRAINAGE IS MULTIPLIED BY THIS VALUE !ROW 13
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min DRNROW
+1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 !max
+SDEPROW - THE PERMEABLE DEPTH OF THE SOIL COLUMN !ROW 14
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min SDEPROW
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+FAREROW - WHEN RUNNING A MOSAIC, THE FRACTIONAL AREA THAT THIS TILE REPRESENTS IN A GRID CELL !ROW 15
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min FAREROW
+5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 !max
+DDENROW - THE DRAINAGE DENSITY OF THE GRU IN m/m2 !ROW 16
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min DDENROW
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+XSLPROW - AVERAGE OVERLAND SLOPE OF A GIVEN GRU !ROW 17
+0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 0.0001 !min XSLPROW
+1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 !max
+XDNROW - HORIZONTAL CONDUCTIVITY AT A DEPTH OF h0 DIVIDED BY HORIZONTAL CONDUCTIVITY AT SURFACE !ROW 18
+0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 !min XDNROW
+1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 !max
+MANNROW - MANNING ROUGHNESS COEFFICIENT !ROW 19
+0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 0.0010 !min MANNROW
+2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 2.0000 !max
+KSROW - HORIZONTAL CONDUCTIVITY AT SURFACE !ROW 20
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min KSROW
+1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 1.0200 !max
+SANDROW - PERCENTAGES OF SAND CONTENT OF SOIL LAYER 1 !ROW 21
+-5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 !min % OF SAND not organic in soil layer 1
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+CLAYROW - PERCENTAGES OF CLAY CONTENT OF SOIL LAYER 1 !ROW 22
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min % OF CLAY not organic or sand in soil layer 1
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ORGMROW - PERCENTAGES OF ORGANIC MATTER OF SOIL LAYER 1 !ROW 23
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min % OF ORGANIC in soil layer 1
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+SANDROW - PERCENTAGES OF SAND CONTENT OF SOIL LAYER 2 !ROW 24
+-5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 !min % OF SAND not organic in soil layer 2
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+CLAYROW - PERCENTAGES OF CLAY CONTENT OF SOIL LAYER 2 !ROW 25
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min % OF CLAY not organic or sand in soil layer 2
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ORGMROW - PERCENTAGES OF ORGANIC MATTER OF SOIL LAYER 2 !ROW 26
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min % OF ORGANIC in soil layer 2
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+SANDROW - PERCENTAGES OF SAND CONTENT OF SOIL LAYER 3 !ROW 27
+-5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 -5.000 !min % OF SAND not organic in soil layer 3
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+CLAYROW - PERCENTAGES OF CLAY CONTENT OF SOIL LAYER 3 !ROW 28
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min % OF CLAY not organic or sand in soil layer 3
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ORGMROW - PERCENTAGES OF ORGANIC MATTER OF SOIL LAYER 3 !ROW 29
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min % OF ORGANIC in soil layer 3
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ZSNLROW - LIMITING SNOW DEPTH BELOW WHICH COVERAGE IS < 100% !ROW 30
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ZSNLROW **From MESH_parameters_hydrology.ini
+5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 5.0000 !max
+ZPLSROW - MAXIMUM WATER PONDING DEPTH FOR SNOW-COVERED AREAS !ROW 31
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ZPLSROW
+1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 !max
+ZPLGROW - MAXIMUM WATER PONDING DEPTH FOR SNOW-FREE AREAS !ROW 32
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ZPLGROW
+1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 1.0000 !max
+FZRCROW - COEFFICIENT FOR THE FROZEN SOIL INFILTRATION PARAMETRIC EQUATION !ROW 33
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min FZRCROW
+3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 3.0000 !max
+LNZ0ROW - NATURAL LOGARITHM OF THE ROUGHNESS LENGTH FOR LAND COVER CATEGORY 1 !ROW 34
+-20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 !min LNZ0ROW1 Column 1 **Atmospheric parameters from MESH_parameters_CLASS.INI
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ALVCROW - VISIBLE ALBEDO FOR LAND COVER CATEGORY 1 !ROW 35
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ALVCROW1
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ALICROW - NEAR INFRARED ALBEDO FOR LAND COVER CATEGORY 1 !ROW 36
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ALICROW1
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+RSMNROW - MINIMUM STOMATAL RESISTANCE FOR THE VEGETATION TYPE 1 !ROW 37
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min RSMNROW1
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+VPDAROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO VAPOR PRESSURE DEFICIT - COMMON VALUE 0.5 !ROW 38
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min VPDAROW1
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+PSGAROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO SOIL WATER SUCTION - COMMON VALUE 100 !ROW 39
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min PSGAROW1
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+LNZ0ROW - NATURAL LOGARITHM OF THE ROUGHNESS LENGTH FOR LAND COVER CATEGORY 2 !ROW 40
+-20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 !min LNZ0ROW2 Column 2 **Atmospheric parameters from MESH_parameters_CLASS.INI
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ALVCROW - VISIBLE ALBEDO FOR LAND COVER CATEGORY 2 !ROW 41
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ALVCROW2
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ALICROW - NEAR INFRARED ALBEDO FOR LAND COVER CATEGORY 2 !ROW 42
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ALICROW2
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+RSMNROW - MINIMUM STOMATAL RESISTANCE FOR THE VEGETATION TYPE 2 !ROW 43
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min RSMNROW2
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+VPDAROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO VAPOR PRESSURE DEFICIT - COMMON VALUE 0.5 !ROW 44
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min VPDAROW2
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+PSGAROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO SOIL WATER SUCTION - COMMON VALUE 100 !ROW 45
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min PSGAROW2
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+LNZ0ROW - NATURAL LOGARITHM OF THE ROUGHNESS LENGTH FOR LAND COVER CATEGORY 3 !ROW 46
+-20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 !min LNZ0ROW3 Column 3 **Atmospheric parameters from MESH_parameters_CLASS.INI
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ALVCROW - VISIBLE ALBEDO FOR LAND COVER CATEGORY 3 !ROW 47
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ALVCROW3
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ALICROW - NEAR INFRARED ALBEDO FOR LAND COVER CATEGORY 3 !ROW 48
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ALICROW3
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+RSMNROW - MINIMUM STOMATAL RESISTANCE FOR THE VEGETATION TYPE 3 !ROW 49
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min RSMNROW3
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+VPDAROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO VAPOR PRESSURE DEFICIT - COMMON VALUE 0.5 !ROW 50
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min VPDAROW3
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+PSGAROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO SOIL WATER SUCTION - COMMON VALUE 100 !ROW 51
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min PSGAROW3
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+LNZ0ROW - NATURAL LOGARITHM OF THE ROUGHNESS LENGTH FOR LAND COVER CATEGORY 4 !ROW 52
+-20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 !min LNZ0ROW4 Column 4 **Atmospheric parameters from MESH_parameters_CLASS.INI
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ALVCROW - VISIBLE ALBEDO FOR LAND COVER CATEGORY 4 !ROW 53
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ALVCROW4
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ALICROW - NEAR INFRARED ALBEDO FOR LAND COVER CATEGORY 4 !ROW 54
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ALICROW4
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+RSMNROW - MINIMUM STOMATAL RESISTANCE FOR THE VEGETATION TYPE 4 !ROW 55
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min RSMNROW4
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+VPDAROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO VAPOR PRESSURE DEFICIT - COMMON VALUE 0.5 !ROW 56
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min VPDAROW4
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+PSGAROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO SOIL WATER SUCTION - COMMON VALUE 100 !ROW 57
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min PSGAROW4
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+LNZ0ROW - NATURAL LOGARITHM OF THE ROUGHNESS LENGTH FOR LAND COVER CATEGORY 5 !ROW 58
+-20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 -20.00 !min LNZ0ROW5 Column 5 **Atmospheric parameters from MESH_parameters_CLASS.INI
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ALVCROW - VISIBLE ALBEDO FOR LAND COVER CATEGORY 5 !ROW 59
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ALVCROW5
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ALICROW - NEAR INFRARED ALBEDO FOR LAND COVER CATEGORY 5 !ROW 60
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ALICROW5
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+LAMXROW - MAXIMUM LEAF AREA INDEX FOR THE VEGETATION TYPE 1 !ROW 61
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min LAMXROW1 Column 6
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+LAMNROW - MINIMUM LEAF AREA INDEX FOR THE VEGETATION TYPE 1 !ROW 62
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min LAMNROW1
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+CMASROW - ANNUAL MAXIMUM CANOPY MASS FOR VEGETATION TYPE 1 [kg m-2] !ROW 63
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min CMASROW1
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ROOTROW - ROOTING DEPTH FOR VEGETATION TYPE 1 !ROW 64
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ROOTROW1
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+QA50ROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO LIGHT, COMMON VALUES - 30 TO 50 W/M2 !ROW 65
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min QA50ROW1
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+VPDBROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO VAPOR PRESSURE DEFICIT, COMMON VALUES - 1.0 !ROW 66
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min VPDBROW1
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+PSGBROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO SOIL WATER SUCTION, COMMON VALUES - 5 !ROW 67
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min PSGBROW1
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+LAMXROW - MAXIMUM LEAF AREA INDEX FOR THE VEGETATION TYPE 2 !ROW 68
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min LAMXROW2 Column 7
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+LAMNROW - MINIMUM LEAF AREA INDEX FOR THE VEGETATION TYPE 2 !ROW 69
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min LAMNROW2
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+CMASROW - ANNUAL MAXIMUM CANOPY MASS FOR VEGETATION TYPE 2 [kg m-2] !ROW 70
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min CMASROW2
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ROOTROW - ROOTING DEPTH FOR VEGETATION TYPE 2 !ROW 71
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ROOTROW2
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+QA50ROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO LIGHT, COMMON VALUES - 30 TO 50 W/M2 !ROW 72
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min QA50ROW2
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+VPDBROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO VAPOR PRESSURE DEFICIT, COMMON VALUES - 1.0 !ROW 73
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min VPDBROW2
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+PSGBROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO SOIL WATER SUCTION, COMMON VALUES - 5 !ROW 74
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min PSGBROW2
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+LAMXROW - MAXIMUM LEAF AREA INDEX FOR THE VEGETATION TYPE 3 !ROW 75
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min LAMXROW3 Column 8
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+LAMNROW - MINIMUM LEAF AREA INDEX FOR THE VEGETATION TYPE 3 !ROW 76
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min LAMNROW3
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+CMASROW - ANNUAL MAXIMUM CANOPY MASS FOR VEGETATION TYPE 3 [kg m-2] !ROW 77
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min CMASROW3
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ROOTROW - ROOTING DEPTH FOR VEGETATION TYPE 3 !ROW 78
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ROOTROW3
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+QA50ROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO LIGHT, COMMON VALUES - 30 TO 50 W/M2 !ROW 79
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min QA50ROW3
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+VPDBROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO VAPOR PRESSURE DEFICIT, COMMON VALUES - 1.0 !ROW 80
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min VPDBROW3
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+PSGBROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO SOIL WATER SUCTION, COMMON VALUES - 5 !ROW 81
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min PSGBROW3
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+LAMXROW - MAXIMUM LEAF AREA INDEX FOR THE VEGETATION TYPE 4 !ROW 82
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min LAMXROW4 Column 9
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+LAMNROW - MINIMUM LEAF AREA INDEX FOR THE VEGETATION TYPE 4 !ROW 83
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min LAMNROW4
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+CMASROW - ANNUAL MAXIMUM CANOPY MASS FOR VEGETATION TYPE 4 [kg m-2] !ROW 84
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min CMASROW4
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+ROOTROW - ROOTING DEPTH FOR VEGETATION TYPE 4 !ROW 85
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min ROOTROW4
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+QA50ROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO LIGHT, COMMON VALUES - 30 TO 50 W/M2 !ROW 86
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min QA50ROW4
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+VPDBROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO VAPOR PRESSURE DEFICIT, COMMON VALUES - 1.0 !ROW 87
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min VPDBROW4
+100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 100.00 !max
+PSGBROW - COEFFICIENT GOVERNING THE RESPONSE OF STOMATES TO SOIL WATER SUCTION, COMMON VALUES - 5 !ROW 88
+0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 0.0000 !min PSGBROW4
+1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 1000.0 !max
+'''
+ return template
+
+ def parse_setup(self):
+ # Return the full INI file text to be written by write_ini_file().
+ # NOTE(review): returns self.template, but the template builder above
+ # returns a local variable -- confirm that __init__ (not visible here)
+ # actually assigns the template text to self.template.
+ return self.template
+
+ def write_ini_file(self):
+ """Write the parsed INI template text to self.inifilepath (overwrites any existing file)."""
+ text = self.parse_setup()
+ with open(self.inifilepath,'w') as setf:
+ setf.write(text)
+
+class MeshParameterTxtFile():
+ '''MESH_parameters.txt is an optional input file, which can be used
+ to replace the traditional INI format parameter files, and allows
+ specifying additional parameters, variables, and options not
+ supported by the legacy file formats. The file uses free-formatting,
+ allows block and in-line comments, and uses a generalized structure
+ for greater flexibility, which includes the use of names to identify
+ parameters, variables, and options.
+
+ The structure of MESH_parameters.txt is free-format and does not contain
+ identified sections. Parameters, variables, and options are listed by line,
+ with corresponding values and settings listed in the same line as the
+ parameter, variable, or option name key.
+ '''
+ def __init__(self,inifilepath,parameter_values=None) -> None:
+ """Initialize file
+
+ Parameters
+ ----------
+ inifilepath : str
+ path to file to write
+ parameter_values: pandas.core.frame.DataFrame
+ parameter values. First column VARIABLE NAMES, second column (or more) VARIABLE values
+ comments can be added by adding '!>' as Variable name.
+ No values should be '' and example is:
+ pardf = pd.DataFrame(
+ np.array([['!>','PARA','PARB','!>','PARC'],
+ ['test comment',32,0.45,'comment two','txt'],
+ ['','','0.8','','']]).transpose()
+ )
+ """
+ self.inifilepath = inifilepath
+ # Only a pandas DataFrame is accepted; anything else (including the
+ # default None) falls back to a dummy, comment-only file.
+ if isinstance(parameter_values,pd.core.frame.DataFrame):
+ self.flags = parameter_values
+ print('Parameter values read from input')
+ else:
+ self.flags = self.set_dummy_content()
+ print('No parameter values read - create dummy file')
+
+ # The file is written to disk immediately on construction.
+ self.write_ini_file()
+
+ def set_dummy_content(self):
+ # Build two comment-only rows ('!>' marks a comment in the TXT
+ # format) noting that no parameter values were supplied.
+ pardf = pd.DataFrame(np.array(
+ [['!>','!>'],
+ ['No parameters given','Dummy file generated']]).transpose()
+ )
+ return pardf
+
+ def parse_setup_dict(self):
+ # NOTE(review): this method is not used by write_ini_file (which
+ # calls parse_setup_df). DataFrame.items() yields
+ # (column label, Series) pairs, so the tuple branch below looks
+ # like it was written for a dict of (value, comment) tuples --
+ # confirm the intended input type before relying on this method.
+ # start with header
+ comment_parameter_lines = ['!> TXT (free-format) MESH configuration file.']
+ for parameter, valcom in self.flags.items():
+ # check if comment is included
+ if isinstance(valcom,tuple):
+ # change type to list, to have uniform behavior
+ # for single values and lists
+ value = list(valcom[0])
+ # next try for comment
+ comment = valcom[1]
+ comment_line = '!>\t{}'.format(comment)
+ comment_parameter_lines.append(comment_line)
+ else:
+ value = list(valcom)
+ # One line per parameter: name, then tab-separated values.
+ value_line = '{}\t{}'.format(parameter,'\t'.join([str(v) for v in value]))
+ comment_parameter_lines.append(value_line)
+ text = '\n'.join(comment_parameter_lines)
+
+ return text
+
+ def parse_setup_df(self):
+ # Render the parameter DataFrame as whitespace-separated text,
+ # preceded by a single header comment line.
+ text_header = '!> TXT (free-format) MESH configuration file.\n'
+ par_text = self.flags.to_string(header=False,index=False,justify='left')
+ return text_header + par_text
+
+ def write_ini_file(self):
+ # Write the rendered parameter text; overwrites self.inifilepath.
+ text = self.parse_setup_df()
+ with open(self.inifilepath,'w') as setf:
+ setf.write(text)
+
+
+class MeshOutputTxtFile():
+ # Placeholder: writer for the MESH output TXT file is not yet implemented.
+ pass
\ No newline at end of file
diff --git a/cwarhm/model_specific_processing/mizuroute.py b/cwarhm/model_specific_processing/mizuroute.py
new file mode 100644
index 0000000..67e83ca
--- /dev/null
+++ b/cwarhm/model_specific_processing/mizuroute.py
@@ -0,0 +1,290 @@
+''' functions related to mizuroute
+'''
+
+import pandas as pd
+import netCDF4 as nc4
+import geopandas as gpd
+import numpy as np
+from datetime import datetime
+import os, sys
+import easymore.easymore as esmr
+import itertools
+
+
# Function to create new nc variables
def create_and_fill_nc_var(ncid, var_name, var_type, dim, fill_val, fill_data, long_name, units):
    """Create a one-dimensional variable in an open netCDF dataset and fill it.

    :param ncid: open dataset to add the variable to
    :type ncid: nc4.Dataset
    :param var_name: name of the new variable
    :type var_name: str
    :param var_type: netCDF data type of the variable (e.g. 'int', 'f8')
    :type var_type: str
    :param dim: name of the single dimension of the variable
    :type dim: str
    :param fill_val: forwarded as the fourth positional argument of
        ``createVariable``. NOTE(review): in netCDF4 that positional slot is
        ``zlib``/``compression``, not ``fill_value`` — confirm intent.
    :type fill_val: bool
    :param fill_data: values written into the variable
    :param long_name: value for the variable's ``long_name`` attribute
    :type long_name: str
    :param units: value for the variable's ``unit`` attribute
    :type units: str
    """
    variable = ncid.createVariable(var_name, var_type, (dim,), fill_val)
    # write the data, then attach metadata attributes
    variable[:] = fill_data
    variable.long_name = long_name
    variable.unit = units
    return
+
def enforce_outlets_from_control(shp_river, river_outlet_ids, river_seg_id, river_down_seg_id):
    """Force the listed segments to be network outlets (modifies in place).

    Every segment whose ID appears in ``river_outlet_ids`` gets its
    downstream segment ID set to 0, which tells mizuRoute the segment has no
    downstream segment attached, i.e. is an outlet. When
    ``river_outlet_ids`` contains 'n/a' the table is left untouched.

    :param shp_river: river network attribute table, modified in place
    :type shp_river: geopandas.GeoDataFrame or pandas.DataFrame
    :param river_outlet_ids: segment IDs to turn into outlets, comma separated for multiple
    :type river_outlet_ids: str
    :param river_seg_id: name of the segment ID column
    :type river_seg_id: str
    :param river_down_seg_id: name of the downstream segment ID column
    :type river_down_seg_id: str
    """
    if 'n/a' in river_outlet_ids:
        return
    # split does nothing if the string holds no comma
    requested_ids = [int(item) for item in river_outlet_ids.split(',')]
    for outlet_id in requested_ids:
        is_outlet = shp_river[river_seg_id] == outlet_id
        if is_outlet.any():
            shp_river.loc[is_outlet, river_down_seg_id] = 0
        else:
            print('outlet_id {} not found in {}'.format(outlet_id, river_seg_id))
+
def enforce_outlets_by_max_upstream_area(shp_river, river_uparea, river_seg_id, river_down_seg_id):
    """Mark the segment with the largest upstream area as the outlet (in place).

    The downstream segment ID of the segment with the maximum upstream area
    is set to 0, indicating to mizuRoute that this segment has no downstream
    segment attached; i.e. it is an outlet.

    Bug fix: the original combined positional ``Series.argmax()`` with
    label-based ``.loc``, which selects the wrong row (or raises) whenever
    the DataFrame index is not a default RangeIndex. ``idxmax()`` returns
    the index *label* and is correct with ``.loc``.

    :param shp_river: river network attribute table, modified in place
    :type shp_river: geopandas.GeoDataFrame or pandas.DataFrame
    :param river_uparea: name of the upstream area column
    :type river_uparea: str
    :param river_seg_id: name of the segment ID column
    :type river_seg_id: str
    :param river_down_seg_id: name of the downstream segment ID column
    :type river_down_seg_id: str
    """
    river_outlet_id = shp_river[river_seg_id].loc[shp_river[river_uparea].idxmax()]
    shp_river.loc[shp_river[river_seg_id] == river_outlet_id, river_down_seg_id] = 0
+
+
+
def generate_mizuroute_topology(infile_river_shp, infile_basin_shp, outfile_topology_nc, river_outlet_ids,
                                basin_hru_id = 'COMID', basin_hru_to_seg = 'hru_to_seg', basin_hru_area = 'area',
                                river_seg_id = 'COMID', river_down_seg_id = 'NextDownID', river_slope = 'slope',
                                river_length = 'length' , fake_river=False):
    """generate mizuroute topology .nc file

    The network topology contains information about the stream network and the routing basins the network is in. These include:

    1. Unique indices of the stream segment;
    2. Unique indices of the routing basins (HRUs; equivalent to SUMMA GRUs in this setup);
    3. ID of the stream segment each individual segment connects to (should be 0 or negative number to indicate that segment is an outlet);
    4. ID of the stream segment a basin drains into;
    5. Basin area;
    6. Segment slope;
    7. Segment length.

    Values for these settings are taken from the user's shapefiles. See: https://mizuroute.readthedocs.io/en/master/Input_data.html

    Fixes relative to the original: ``fake_river == True`` replaced by a
    plain truthiness test, and a warning is printed when the river shapefile
    is missing and ``fake_river`` is False (previously a silent no-op that
    produced no output file).

    :param infile_river_shp: path to river shapefile
    :type infile_river_shp: file path .shp
    :param infile_basin_shp: path to basin shapefile
    :type infile_basin_shp: file path .shp
    :param outfile_topology_nc: path to save output netCDF file
    :type outfile_topology_nc: file path .nc
    :param river_outlet_ids: river_seg_ids that need to be set as outlet, comma separated for multiple
    :type river_outlet_ids: str
    :param basin_hru_id: name of the routing basin id column in :param:infile_basin_shp , defaults to 'COMID'
    :type basin_hru_id: str, optional
    :param basin_hru_to_seg: name of the column that shows which river segment each HRU connects to, defaults to 'hru_to_seg'
    :type basin_hru_to_seg: str, optional
    :param basin_hru_area: Name of the catchment area column. Area must be in units [m^2], defaults to 'area'
    :type basin_hru_area: str, optional
    :param river_seg_id: Name of the segment ID column in :param:infile_river_shp, defaults to 'COMID'
    :type river_seg_id: str, optional
    :param river_down_seg_id: Name of the downstream segment ID column, defaults to 'NextDownID'
    :type river_down_seg_id: str, optional
    :param river_slope: Name of the slope column. Slope must be in units [length/length], defaults to 'slope'
    :type river_slope: str, optional
    :param river_length: Name of the segment length column. Length must be in units [m], defaults to 'length'
    :type river_length: str, optional
    :param fake_river: Flag to attempt creating fake river network for headwater basins , defaults to False
    :type fake_river: bool, optional
    """
    shp_basin = gpd.read_file(infile_basin_shp)
    if os.path.isfile(infile_river_shp):
        # Open the shapefile
        shp_river = gpd.read_file(infile_river_shp)

        # Ensure that the most downstream segment in the river network has a downstream_ID of 0
        # This indicates to mizuRoute that this segment has no downstream segment attached to it
        enforce_outlets_from_control(shp_river, river_outlet_ids, river_seg_id, river_down_seg_id)
        # Make the netcdf file
        with nc4.Dataset(outfile_topology_nc, 'w', format='NETCDF4') as ncid:
            # Set general attributes
            now = datetime.now()
            ncid.setncattr('Author', "Created by SUMMA workflow scripts")
            ncid.setncattr('History', 'Created ' + now.strftime('%Y/%m/%d %H:%M:%S'))
            ncid.setncattr('Purpose', 'Create a river network .nc file for mizuRoute routing')
            # Define the seg and hru dimensions
            num_seg = len(shp_river)
            num_hru = len(shp_basin)
            ncid.createDimension('seg', num_seg)
            ncid.createDimension('hru', num_hru)
            # --- Variables
            create_and_fill_nc_var(ncid, 'segId', 'int', 'seg', False, shp_river[river_seg_id].values.astype(int), 'Unique ID of each stream segment', '-')
            create_and_fill_nc_var(ncid, 'downSegId', 'int', 'seg', False, shp_river[river_down_seg_id].values.astype(int), 'ID of the downstream segment', '-')
            create_and_fill_nc_var(ncid, 'slope', 'f8', 'seg', False, shp_river[river_slope].values.astype(float), 'Segment slope', '-')
            create_and_fill_nc_var(ncid, 'length', 'f8', 'seg', False, shp_river[river_length].values.astype(float), 'Segment length', 'm')
            create_and_fill_nc_var(ncid, 'hruId', 'int', 'hru', False, shp_basin[basin_hru_id].values.astype(int), 'Unique hru ID', '-')
            create_and_fill_nc_var(ncid, 'hruToSegId', 'int', 'hru', False, shp_basin[basin_hru_to_seg].values.astype(int), 'ID of the stream segment to which the HRU discharges', '-')
            create_and_fill_nc_var(ncid, 'area', 'f8', 'hru', False, shp_basin[basin_hru_area].values.astype(float), 'HRU area', 'm^2')
    elif fake_river:
        print('river network shapefile does not exist. generate a fake river network.')
        # a fake network is only meaningful for a single headwater basin
        if len(shp_basin) > 1:
            sys.exit('len(shp_basin)>1, indicating this is not a headwater basin! please check!')
        with nc4.Dataset(outfile_topology_nc, 'w', format='NETCDF4') as ncid:
            # Set general attributes
            now = datetime.now()
            ncid.setncattr('Author', "Created by SUMMA workflow scripts")
            ncid.setncattr('History', 'Created ' + now.strftime('%Y/%m/%d %H:%M:%S'))
            ncid.setncattr('Purpose', 'Create a river network .nc file for mizuRoute routing')
            # Define the seg and hru dimensions
            num_seg = len(shp_basin)
            num_hru = len(shp_basin)
            ncid.createDimension('seg', num_seg)
            ncid.createDimension('hru', num_hru)
            # --- Variables: one zero-length, near-flat fake segment per basin,
            # draining directly to the outlet (downSegId = 0)
            create_and_fill_nc_var(ncid, 'segId', 'int', 'seg', False, shp_basin[basin_hru_id].values.astype(int), 'Unique ID of each stream segment', '-')
            create_and_fill_nc_var(ncid, 'downSegId', 'int', 'seg', False, np.zeros(num_seg), 'ID of the downstream segment', '-')
            create_and_fill_nc_var(ncid, 'slope', 'f8', 'seg', False, np.ones(num_seg)*1e-5, 'Segment slope', '-')
            create_and_fill_nc_var(ncid, 'length', 'f8', 'seg', False, np.ones(num_seg)*1, 'Segment length', 'm')
            create_and_fill_nc_var(ncid, 'hruId', 'int', 'hru', False, shp_basin[basin_hru_id].values.astype(int), 'Unique hru ID', '-')
            create_and_fill_nc_var(ncid, 'hruToSegId', 'int', 'hru', False, shp_basin[basin_hru_to_seg].values.astype(int), 'ID of the stream segment to which the HRU discharges', '-')
            create_and_fill_nc_var(ncid, 'area', 'f8', 'hru', False, shp_basin[basin_hru_area].values.astype(float), 'HRU area', 'm^2')
    else:
        # robustness: previously this case fell through silently with no output
        print('river network shapefile {} does not exist and fake_river is False; no topology file written.'.format(infile_river_shp))
+
def generate_mizuroute_remap(infile_gruhru_shp, infile_basin_shp, outfile_routingremap_nc,
                             rm_shp_hru_id = 'COMID', hm_shp_gru_id = 'GRU_ID', remap_flag = True):
    """Remaps SUMMA GRU to mizuRoute GRU

    Note that this file is **_only_** needed if the defined SUMMA GRUs **_do not_** map 1:1 onto the routing basins as defined for mizuRoute. It is typically easiest to ensure this direct mapping. In cases where the routing basins are different from the GRUs used by SUMMA, this script generates the required mizuRoute input file to do so.

    The optional remap file contains information about how the model elements of the Hydrologic Model (HM; i.e. SUMMA in this setup) map onto the routing basins used by the Routing Model (RM; i.e. mizuRoute). This information includes:
    1. Unique RM HRU IDs of the routing basins;
    2. Unique HM HRU IDs of the modeled basins (note that in this case what mizuRoute calls a "HM HRU" is equivalent to what SUMMA calls a GRU);
    3. The number of HM HRUs each RM HRU is overlapped by;
    4. The weights (relative area) each HM HRU contributes to each RM HRU.

    IDs are taken from the user's shapefiles whereas overlap and weight are calculated based on an intersection of both shapefiles. See: https://mizuroute.readthedocs.io/en/master/Input_data.html

    Bug fix: the original unconditionally called ``remap_flag.lower()``,
    which crashed for the default boolean value ``True``. Both booleans and
    the legacy 'yes'/'no' strings are now accepted.

    :param infile_gruhru_shp: path to the hydrologic-model (SUMMA GRU) shapefile
    :type infile_gruhru_shp: file path .shp
    :param infile_basin_shp: path to basin shapefile
    :type infile_basin_shp: file path .shp
    :param outfile_routingremap_nc: path to save the remap netCDF file
    :type outfile_routingremap_nc: file path .nc
    :param rm_shp_hru_id: name of the routing-basin ID column, defaults to 'COMID'
    :type rm_shp_hru_id: str, optional
    :param hm_shp_gru_id: name of the SUMMA GRU ID column, defaults to 'GRU_ID'
    :type hm_shp_gru_id: str, optional
    :param remap_flag: flag to determine if remapping needs to be run; bool or
        legacy 'yes'/'no' string, defaults to True
    :type remap_flag: bool or str, optional
    """
    # the flag may be 'yes'/'no' (str) for backwards compatibility
    if isinstance(remap_flag, str):
        remap_flag = remap_flag.lower() == 'yes'
    if not remap_flag:
        print('Active control file indicates remapping is not needed. Aborting.')
        return

    # Load both shapefiles
    hm_shape = gpd.read_file(infile_gruhru_shp)
    rm_shape = gpd.read_file(infile_basin_shp)

    # Create an EASYMORE object
    esmr_caller = esmr()

    # Project both shapes to equal area
    hm_shape = hm_shape.to_crs('EPSG:6933')
    rm_shape = rm_shape.to_crs('EPSG:6933')

    # Run the intersection
    intersected_shape = esmr.intersection_shp(esmr_caller, rm_shape, hm_shape)

    # Reproject the intersection to WSG84
    intersected_shape = intersected_shape.to_crs('EPSG:4326')

    # --- Pre-process the variables
    # Define a few shorthand variables (column names created by easymore)
    int_rm_id = 'S_1_' + rm_shp_hru_id
    int_hm_id = 'S_2_' + hm_shp_gru_id
    int_weight = 'AP1N'

    # Sort the intersected shape by RM ID first, and HM ID second. This means all info per RM ID is in consecutive rows
    intersected_shape = intersected_shape.sort_values(by=[int_rm_id, int_hm_id])

    # Routing Network HRU ID
    nc_rnhruid = intersected_shape.groupby(int_rm_id).agg({int_rm_id: pd.unique}).values.astype(int)

    # Number of Hydrologic Model elements (GRUs in SUMMA's case) per Routing Network catchment
    nc_noverlaps = intersected_shape.groupby(int_rm_id).agg({int_hm_id: 'count'}).values.astype(int)

    # Hydrologic Model GRU IDs that are associated with each part of the overlap
    multi_nested_list = intersected_shape.groupby(int_rm_id).agg({int_hm_id: list}).values.tolist() # Get the data
    nc_hmgruid = list(
        itertools.chain.from_iterable(itertools.chain.from_iterable(multi_nested_list))) # Combine 3 nested list into 1

    # Areal weight of each HM GRU per part of the overlaps
    multi_nested_list = intersected_shape.groupby(int_rm_id).agg({int_weight: list}).values.tolist()
    nc_weight = list(itertools.chain.from_iterable(itertools.chain.from_iterable(multi_nested_list)))

    # --- Make the `.nc` file
    # Find the dimension sizes
    num_hru = len(rm_shape)
    num_data = len(intersected_shape)

    # Make the netcdf file
    with nc4.Dataset(outfile_routingremap_nc, 'w', format='NETCDF4') as ncid:
        # Set general attributes
        now = datetime.now()
        ncid.setncattr('Author', "Created by SUMMA workflow scripts")
        ncid.setncattr('History', 'Created ' + now.strftime('%Y/%m/%d %H:%M:%S'))
        ncid.setncattr('Purpose', 'Create a remapping .nc file for mizuRoute routing')
        # Define the seg and hru dimensions
        ncid.createDimension('hru', num_hru)
        ncid.createDimension('data', num_data)
        # --- Variables
        create_and_fill_nc_var(ncid, 'RN_hruId', 'int', 'hru', False, nc_rnhruid, 'River network HRU ID', '-')
        create_and_fill_nc_var(ncid, 'nOverlaps', 'int', 'hru', False, nc_noverlaps, 'Number of overlapping HM_HRUs for each RN_HRU', '-')
        create_and_fill_nc_var(ncid, 'HM_hruId', 'int', 'data', False, nc_hmgruid, 'ID of overlapping HM_HRUs. Note that SUMMA calls these GRUs', '-')
        create_and_fill_nc_var(ncid, 'weight', 'f8', 'data', False, nc_weight, 'Areal weight of overlapping HM_HRUs. Note that SUMMA calls these GRUs', '-')
diff --git a/cwarhm/model_specific_processing/summa.py b/cwarhm/model_specific_processing/summa.py
new file mode 100644
index 0000000..e69de29
diff --git a/cwarhm/util/__init__.py b/cwarhm/util/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/cwarhm/util/logger_config.ini b/cwarhm/util/logger_config.ini
new file mode 100644
index 0000000..19a27e8
--- /dev/null
+++ b/cwarhm/util/logger_config.ini
@@ -0,0 +1,36 @@
+[loggers]
+keys=root,sLogger
+
+[handlers]
+keys=consoleHandler,fileHandler
+
+[formatters]
+keys=fileFormatter,consoleFormatter
+
+[logger_root]
+level=DEBUG
+handlers=consoleHandler
+
+[logger_sLogger]
+level=DEBUG
+handlers=consoleHandler,fileHandler
+qualname=fileOutput
+propagate=0
+
+[handler_consoleHandler]
+class=StreamHandler
+level=DEBUG
+formatter=consoleFormatter
+args=(sys.stdout,)
+
+[handler_fileHandler]
+class=FileHandler
+level=DEBUG
+formatter=fileFormatter
+args=('%(logfilename)s',)
+
+[formatter_fileFormatter]
+format=%(asctime)s - %(filename)s - %(funcName)s - %(levelname)s - %(message)s
+
+[formatter_consoleFormatter]
+format=%(asctime)s - %(filename)s - %(funcName)s - %(levelname)s - %(message)s
\ No newline at end of file
diff --git a/cwarhm/util/util.py b/cwarhm/util/util.py
new file mode 100644
index 0000000..ad13d4c
--- /dev/null
+++ b/cwarhm/util/util.py
@@ -0,0 +1,179 @@
+import os
+import re
+from pathlib import Path
+from datetime import datetime
+import configparser
+import argparse
+import subprocess
+import ast
+import logging
+import logging.config
+from multiprocessing import Process
+logger = logging.getLogger('fileOutput')
+
+'''
+This file holds a collection of functions that support reading of control files
+making folder structures etc.
+'''
+
def build_folder_structure(control_options):
    """Create every folder referenced by the control options.

    Any option whose key contains the substring 'path' is interpreted as a
    directory and created, including missing parents; folders that already
    exist are left alone. No key containing 'path' should point to a file.

    Parameters
    ----------
    control_options : dict
        dictionary with control options read from a control file
        with :func:`read_summa_workflow_control_file`
    """
    folder_values = (value for key, value in control_options.items() if 'path' in key)
    for folder in folder_values:
        Path(folder).mkdir(parents=True, exist_ok=True)
+
def read_merit_credentials_file():
    """Read MERIT Hydro login credentials from the ``~/.merit`` file.

    The file is expected to contain ``key: value`` pairs, one per line.
    Splitting is limited to the first ':' so values may themselves contain
    colons (the original ``line.split(':')`` raised ``ValueError`` for e.g.
    passwords or URLs containing ':').

    :return: mapping of credential keys to their (whitespace-stripped) values
    :rtype: dict
    """
    merit_login = {}
    with open(os.path.expanduser("~/.merit")) as file:
        for line in file:
            key, val = line.split(':', 1)  # split only on the first ':'
            merit_login[key] = val.strip()  # remove whitespace, newlines
    return merit_login
+
def unpack_year_range(yr_string):
    """Expand a 'start,end' year string into a list of all years (inclusive)."""
    start, end = (int(part) for part in yr_string.split(','))
    return list(range(start, end + 1))
+
def read_summa_workflow_control_file(workflow_control_file, comment_char='#', option_char='|'):
    """Read complete control data from the SUMMAworkflow (https://github.com/CH-Earth/summaWorkflow_public)
    format control file.

    Lines of the form ``option | value  # comment`` are parsed into a dict.
    A value of 'default' is resolved from the default path quoted in the
    line's comment: the quoted path has 'root_path' and '[name]' replaced
    with the already-parsed ``root_path`` and ``domain_name`` options (so
    those must appear earlier in the file than any 'default' value).

    Fixes relative to the original: leftover debug ``print`` calls removed
    (one printed the literal string 'value'), the ``comment`` variable is
    initialized so a 'default' value on a comment-free line no longer raises
    ``NameError``, and the file is opened with a context manager.

    :param workflow_control_file: SUMMAworkflow control file path i.e./summaWorkflow_public/0_control_files/control_active.txt
    :type workflow_control_file: str
    :param comment_char: character that starts a comment, defaults to '#'
    :type comment_char: str, optional
    :param option_char: character separating option and value, defaults to '|'
    :type option_char: str, optional
    :return: dictionary containing all options from the control file
    :rtype: dict
    """
    control_options = {}
    with open(workflow_control_file) as f:
        for line in f:
            comment = ''
            # First, remove comments: split on comment char, keep the part before
            if comment_char in line:
                line, comment = line.split(comment_char, 1)
            # Second, only lines with an option=value pair are of interest
            if option_char not in line:
                continue
            option, value = line.split(option_char, 1)
            option = option.strip()
            value = value.strip()
            # If value is 'default', exchange it for the default path given in the comment
            if value == 'default':
                # regex the quoted default path out of the comment
                pattern = r"([^']*[^'])"
                default_path_in_comment = re.findall(pattern, comment)[-2]  # one to last, because of point at the end
                # default path is always of the form root_path/domain_[name]/[last_part_of_path]
                value = default_path_in_comment.replace('root_path', control_options['root_path'])
                value = value.replace('[name]', control_options['domain_name'])
            control_options[option] = value
    return control_options
+
def get_summa_workflow_control_setting(workflow_control_file, setting):
    """Read a single line item from a SUMMAworkflow-format control file.

    (https://github.com/CH-Earth/summaWorkflow_public)

    Parameters
    ----------
    workflow_control_file : SUMMAworkflow control file path
        i.e./summaWorkflow_public/0_control_files/control_active.txt
    setting : line item of SUMMA workflow control file
        i.e. catchment_shp_name
    Returns
    -------
    substring : configuration setting from control file
        i.e. bow_distributed_elevation_zone.shp
    """
    # scan for the first line mentioning the requested setting
    with open(workflow_control_file) as contents:
        for line in contents:
            if setting in line:
                break
    # keep what follows '|', drop any trailing '#' comment, strip whitespace
    value_part = line.split('|', 1)[1]
    value_part = value_part.split('#', 1)[0]
    return value_part.strip()
+
def start_logger(file_name='log_file'):
    '''This function will create a logs folder and file in the same directory as the main script
    It uses the logger_config.ini to set properties
    Parameters
    ----------
    file_name : str
        optional argument for name to be given to logfile
    Returns
    -------
    logger : logging object
        logger can be called with logger.debug("") etc to log to the console and output file
    '''

    now = datetime.now()
    # timestamped log file name
    # NOTE(review): the name embeds ':' characters, which are invalid in
    # Windows file names — confirm this only needs to run on POSIX systems
    log_file_name = f'{file_name}_{now.strftime("%Y-%m-%d_%H:%M:%S")}.log'
    working_folder = os.getcwd()

    # logs/ subfolder of the *current working directory* (not this module's folder)
    Path(working_folder,'logs').mkdir(parents=True, exist_ok=True)
    logfile = os.path.join(working_folder,'logs',log_file_name)

    # logger_config.ini sits next to this module; 'logfilename' is interpolated
    # into the file handler's args by fileConfig
    logging.config.fileConfig(os.path.join(os.path.dirname(os.path.abspath(__file__)),'logger_config.ini'), defaults={'logfilename':logfile},disable_existing_loggers=False)
    # silence matplotlib's noisy font-manager debug messages
    logging.getLogger('matplotlib.font_manager').disabled = True
    logger = logging.getLogger('fileOutput')
    logger.info(f'Log File Generated: {logfile}')

    return logger
+
def log_subprocess_output(pipe):
    '''Log every line read from a subprocess output pipe at INFO level.'''
    # iter() sentinel form stops at EOF (readline returns b'')
    for raw_line in iter(pipe.readline, b''):
        logging.info('External script logging: %r', raw_line)
+
def isstrbool(instr):
    """Return True exactly when the input string equals 'True'."""
    return instr == 'True'
+
+
def get_git_revision_hash(directory = "") -> str:
    """Return the current git HEAD commit hash as a string.

    If no directory is provided, the repository containing the current
    working directory is used.

    Bug fix: the original always appended ``directory`` to the command, so
    the default empty string made ``git rev-parse`` fail with
    "ambiguous argument ''". The extra argument is now only appended when
    non-empty.
    # NOTE(review): to query a *different* repository, ``git -C <dir>`` is
    # likely the intended mechanism — confirm with callers before changing.

    :param directory: optional extra argument forwarded to ``git rev-parse``
    :type directory: str
    :return: the HEAD commit hash
    :rtype: str
    """
    command = ['git', 'rev-parse', 'HEAD']
    if directory:
        command.append(f'{directory}')
    return subprocess.check_output(command).decode('ascii').strip()
+
def run_in_parallel(*fns):
    """Run the given zero-argument callables concurrently.

    Each callable is executed in its own process; the call blocks until
    every process has finished.
    """
    processes = []
    for fn in fns:
        worker = Process(target=fn)
        worker.start()
        processes.append(worker)
    for worker in processes:
        worker.join()
\ No newline at end of file
diff --git a/cwarhm/wrappers/__init__.py b/cwarhm/wrappers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/cwarhm/wrappers/cwarhm_summa.py b/cwarhm/wrappers/cwarhm_summa.py
new file mode 100644
index 0000000..54ff27b
--- /dev/null
+++ b/cwarhm/wrappers/cwarhm_summa.py
@@ -0,0 +1,1007 @@
+# mwah_wrapper.py
+
+import sys
+import os
+import subprocess
+import functools
+import shutil
+
# Default location of the summaWorkflow_public submodule, resolved relative
# to this file: ../../submodules/summaWorkflow_public
submodule_path_default = os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
    "..",
    "..",
    "submodules",
    "summaWorkflow_public",
)
+
+
def set_default_path(submodule_path):
    """Return *submodule_path*, or the default submodule location when falsy.

    The default is ../../submodules/summaWorkflow_public relative to this file.
    """
    if submodule_path:
        return submodule_path
    return os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "..",
        "..",
        "submodules",
        "summaWorkflow_public",
    )
+
+
def localworkingdir(func):
    """Decorator - run *func* with the working directory set to the script's folder.

    The wrapped function's first argument must be a script path; the working
    directory is switched to that script's directory for the duration of the
    call and restored afterwards, even on exceptions.

    Bug fixes: the original decorator never returned ``wrapper`` (decorated
    functions became ``None``) and printed the unbound ``os.getcwd`` function
    instead of calling it.
    """

    @functools.wraps(func)
    def wrapper(script_path, *args, **kwargs):
        main_working_dir = os.getcwd()
        script_dir = os.path.dirname(script_path)
        os.chdir(script_dir)
        print("working dir of decorator " + script_dir)
        print(os.getcwd())
        try:
            return func(script_path, *args, **kwargs)
        finally:
            os.chdir(main_working_dir)

    return wrapper
+
+
def exec_python_lwd(script_path, *args, **kwargs):
    """Executes python script with localized working directory.

    Changes working dir to the folder of the python script, executes the
    script's source with ``exec``, then changes back to the original working
    directory. Fix: the restore now happens in a ``finally`` block, so the
    working directory is no longer left changed when the script raises.

    Args:
        script_path (path string): Path to python script
    """
    main_working_dir = os.getcwd()
    script_dir = os.path.dirname(script_path)
    print("running wrapper for {}".format(script_path))
    os.chdir(script_dir)
    try:
        with open(script_path) as script_file:
            source = script_file.read()
        # NOTE(review): exec runs the script with this module's globals —
        # acceptable only for trusted workflow scripts
        exec(source, globals(), globals())
    finally:
        os.chdir(main_working_dir)
+
+#TODO write documentation
+#TODO finish up subprocess call
def subprocess_lwd(script_path, *args, **kwargs):
    """Run a shell script with the working directory set to its folder.

    The script is executed with ``sh``; the original working directory is
    restored afterwards. Fixes: restore moved into a ``finally`` block so a
    failing script no longer leaves the working directory changed, the
    unused ``working_env`` local and the large commented-out experimental
    code were removed.

    :param script_path: path to the shell script to execute
    :type script_path: str
    """
    main_working_dir = os.getcwd()
    script_dir = os.path.dirname(script_path)
    print("running wrapper for {}".format(script_path))
    os.chdir(script_dir)
    try:
        subprocess.run(["sh", script_path])
    finally:
        os.chdir(main_working_dir)
+
def run_jupyter_notebook(script_path, *args, **kwargs):
    """Execute a jupyter notebook in place via ``jupyter nbconvert``.

    The working directory is switched to the notebook's folder for the
    duration of the call and restored afterwards.
    """
    original_dir = os.getcwd()
    notebook_dir = os.path.dirname(script_path)
    print("running wrapper for {}".format(script_path))
    os.chdir(notebook_dir)
    subprocess.run("jupyter nbconvert --to notebook --execute {}".format(script_path),shell=True)
    os.chdir(original_dir)
+
def change_control_file_in_submodule(submodule_path: str = None, control_file_name: str = None):
    """Copy a control file into the submodule and stage the folder-prep script.

    Copies ``control_file_name`` (looked up relative to the current working
    directory) into the submodule's ``0_control_files`` folder twice — under
    its own name and as ``control_active.txt`` — and copies the local
    ``make_folder_structure.py`` into ``1_folder_prep``. Needs to be run
    when starting a new run.
    """
    source_dir = '.'
    control_src = os.path.join(source_dir, control_file_name)
    folder_prep_src = os.path.join(source_dir, 'make_folder_structure.py')
    print(os.getcwd())
    control_dst_dir = os.path.join(submodule_path, '0_control_files')
    folder_prep_dst_dir = os.path.join(submodule_path, '1_folder_prep')
    shutil.copy(control_src, os.path.join(control_dst_dir, control_file_name))
    shutil.copy(control_src, os.path.join(control_dst_dir, 'control_active.txt'))
    shutil.copy(folder_prep_src, os.path.join(folder_prep_dst_dir, 'make_folder_structure.py'))
+
+#%% From here the wrapper functions start
+
+### 1 folder prep
def create_folder_structure(submodule_path: str = None):
    """Executes the code from summaWorkflow_public step 1_folder_prep.

    Runs the submodule's ``make_folder_structure.py``, which:

    1. Copies the specified control file into `control_active.txt`;
    2. Prepares a folder structure using the settings in `control_active.txt`;
    3. Creates a copy of itself to be stored in the new folder structure.

    The destination folders are referred to as "domain folders".

    :param submodule_path: path to the summaWorkflow_public repository.
        Defaults to "../../submodules/summaWorkflow_public".
    :type submodule_path: str
    """
    if not submodule_path:
        submodule_path = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            "..",
            "..",
            "submodules",
            "summaWorkflow_public",
        )
    script = os.path.join(submodule_path, "1_folder_prep", "make_folder_structure.py")
    exec_python_lwd(script)
+
+
+### 2 install
def clone_summa_repo(
    submodule_path: str = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "..",
        "..",
        "submodules",
        "summaWorkflow_public",
    )
):
    """Download the latest SUMMA code base via the submodule's shell script.

    Relevant settings in `control_active.txt` that the called script uses:
    - **github_summa, github_mizu**: GitHub URLs from which to clone SUMMA and mizuRoute.

    Args:
        submodule_path (str, optional): path to the summaWorkflow_public repository.
            Defaults to "../../submodules/summaWorkflow_public".
    """
    subprocess_lwd(os.path.join(submodule_path, "2_install", "1a_clone_summa.sh"))
+
+
def compile_summa(submodule_path: str):
    """Compile SUMMA by running the submodule's ``1b_compile_summa.sh`` script.

    Args:
        submodule_path (str): path to the summaWorkflow_public repository.
    """
    subprocess_lwd(os.path.join(submodule_path, "2_install", "1b_compile_summa.sh"))
+
+
def clone_mizuroute_repo(submodule_path: str):
    """Clone mizuRoute by running the submodule's ``2a_clone_mizuroute.sh`` script.

    Args:
        submodule_path (str): path to the summaWorkflow_public repository.
    """
    subprocess_lwd(os.path.join(submodule_path, "2_install", "2a_clone_mizuroute.sh"))
+
+
def compile_mizuroute(submodule_path: str):
    """Compile mizuRoute by running the submodule's ``2b_compile_mizuroute.sh`` script.

    Args:
        submodule_path (str): path to the summaWorkflow_public repository.
    """
    subprocess_lwd(os.path.join(submodule_path, "2_install", "2b_compile_mizuroute.sh"))
+
+
+### 3a forcing
def download_ERA5_pressureLevel_annual(submodule_path: str):
    """Run the submodule notebook that downloads annual ERA5 pressure-level data.

    :param submodule_path: path to the summaWorkflow_public repository.
    :type submodule_path: str
    """
    notebook = os.path.join(
        submodule_path,
        "3a_forcing",
        "1a_download_forcing",
        "download_ERA5_pressureLevel_annual.ipynb",
    )
    run_jupyter_notebook(notebook)
+
+
def download_ERA5_surfaceLevel_annual(submodule_path: str):
    """Run the submodule notebook that downloads annual ERA5 surface-level data.

    :param submodule_path: path to the summaWorkflow_public repository.
    :type submodule_path: str
    """
    notebook = os.path.join(
        submodule_path,
        "3a_forcing",
        "1a_download_forcing",
        "download_ERA5_surfaceLevel_annual.ipynb",
    )
    run_jupyter_notebook(notebook)
+
+
def run_download_ERA5_pressureLevel_paralell(submodule_path: str):
    """Run the submodule's parallel ERA5 pressure-level download shell script.

    Args:
        submodule_path (str): path to the summaWorkflow_public repository.
    """
    subprocess_lwd(os.path.join(
        submodule_path,
        "3a_forcing",
        "1a_download_forcing",
        "run_download_ERA5_pressureLevel.sh",
    ))
+
+
def run_download_ERA5_surfaceLevel_paralell(submodule_path: str):
    """Run the submodule's parallel ERA5 surface-level download shell script.

    Args:
        submodule_path (str): path to the summaWorkflow_public repository.
    """
    subprocess_lwd(os.path.join(
        submodule_path,
        "3a_forcing",
        "1a_download_forcing",
        "run_download_ERA5_surfaceLevel.sh",
    ))
+
+
def download_ERA5_geopotential(submodule_path: str):
    """Run the submodule script that downloads the ERA5 geopotential data.

    :param submodule_path: path to the summaWorkflow_public repository.
    :type submodule_path: str
    """
    script = os.path.join(
        submodule_path,
        "3a_forcing",
        "1b_download_geopotential",
        "download_ERA5_geopotential.py",
    )
    exec_python_lwd(script)
+
+
def merge_forcing(submodule_path: str = submodule_path_default):
    """3a_forcing, 2_merge_forcing
    Combine separate surface and pressure level downloads.

    Creates a single monthly `.nc` file with SUMMA-ready variables for
    further processing and combines ERA5's `u` and `v` wind components into
    a single directionless wind vector. The called script:

    1. Converts longitude coordinates in the pressureLevel file to range [-180,180]
    2. Checks that lat/lon and times agree between both data sets
    3. Aggregates data into a single file 'ERA5_NA_[yyyymm].nc', keeping the relevant metadata in place

    Args:
        submodule_path (str, optional): path to the summaWorkflow_public repository. Defaults to submodule_path_default.
    """
    script = os.path.join(
        submodule_path,
        "3a_forcing",
        "2_merge_forcing",
        "ERA5_surface_and_pressure_level_combiner.py",
    )
    exec_python_lwd(script)
+
+
+def create_ERA5_shapefile(submodule_path: str = submodule_path_default):
+ """mwah workflow 3a_forcing, 3_create_shapefile
+ The shapefile for the forcing data needs to represent the regular latitude/longitude grid of the ERA5 data. We need this for later intersection with the catchment shape(s) so we can create appropriately weighted forcing for each model element.
+
+ Notebook/script reads location of merged forcing data and the spatial extent of the data from the control file.
+
+ ## Assumptions not included in `control_active.txt`
+ - Code assumes that the merged forcing contains dimension variables with the names "latitude" and "longitude". This is the case for ERA5.
+
+ Args:
+ submodule_path (str, optional): path to the summaWorkflow_public repository. Defaults to submodule_path_default.
+ """
+ python_file_to_run = os.path.join(
+ submodule_path, "3a_forcing", "3_create_shapefile", "create_ERA5_shapefile.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+### 3b parameters
+## Merit Hydro
+
+
+def download_merit_hydro_adjusted_elevation(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MERIT_Hydro_DEM", "1_download", "download_merit_hydro_adjusted_elevation.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def unpack_merit_hydro(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MERIT_Hydro_DEM", "2_unpack", "unpack_merit_hydro_dem.sh"
+ )
+
+ subprocess_lwd(python_file_to_run)
+
+
+def create_merit_hydro_virtual_dataset(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MERIT_Hydro_DEM", "3_create_vrt", "make_merit_dem_vrt.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+def specify_merit_hydro_subdomain(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MERIT_Hydro_DEM", "4_specify_subdomain", "specify_subdomain.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+def convert_merit_hydro_vrt_to_tif(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MERIT_Hydro_DEM", "5_convert_to_tif", "convert_vrt_to_tif.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+## MODIS
+
+
+def download_modis_mcd12q1_v6(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MODIS_MCD12Q1_V6", "1_download", "download_modis_mcd12q1_v6.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def create_modis_virtual_dataset(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MODIS_MCD12Q1_V6", "2_create_vrt", "make_vrt_per_year.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+def reproject_modis_virtual_dataset(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MODIS_MCD12Q1_V6", "3_reproject_vrt", "reproject_vrt.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+def specify_modis_subdomain(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MODIS_MCD12Q1_V6", "4_specify_subdomain", "specify_subdomain.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+def create_multiband_modis_vrt(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MODIS_MCD12Q1_V6", "5_multiband_vrt", "create_multiband_vrt.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+def convert_modis_vrt_to_tif(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MODIS_MCD12Q1_V6", "6_convert_to_tif", "convert_vrt_to_tif.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+def find_mode_modis_landclass(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "MODIS_MCD12Q1_V6", "7_find_mode_land_class", "find_mode_landclass.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+### Soilgrids
+
+
+def download_soilgrids_soilclass_global(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "SOILGRIDS", "1_download", "download_soilclass_global_map.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def extract_soilgrids_domain(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "3b_parameters", "SOILGRIDS", "2_extract_domain", "extract_domain.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+### 4a sort shape
+def sort_catchment_shape(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "4a_sort_shape", "1_sort_catchment_shape.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+### 4b remapping
+## 1 topo
+def find_HRU_elevation(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "4b_remapping", "1_topo", "1_find_HRU_elevation.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def find_HRU_soil_classes(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "4b_remapping", "1_topo", "2_find_HRU_soil_classes.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def find_HRU_land_classes(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "4b_remapping", "1_topo", "3_find_HRU_land_classes.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+## 2 forcing
+def make_single_weighted_forcing_file(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "4b_remapping", "2_forcing", "1_make_one_weighted_forcing_file.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def make_all_weighted_forcing_files(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "4b_remapping", "2_forcing", "2_make_all_weighted_forcing_files.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def temperature_lapsing_and_datastep(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "4b_remapping", "2_forcing", "3_temperature_lapsing_and_datastep.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+### 5 model input
+## mizuRoute
+def read_mizuroute_base_settings():
+ pass
+
+
+def copy_mizuroute_base_settings(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "mizuRoute","1a_copy_base_settings", "1_copy_base_settings.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def create_mizuroute_network_topology_file(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "mizuRoute","1b_network_topology_file", "1_create_network_topology_file.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def remap_summa_catchments_to_mizurouting(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "mizuRoute","1c_optional_remapping_file", "1_remap_summa_catchments_to_routing.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def create_mizuroute_control_file(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "mizuRoute","1d_control_file", "1_create_control_file.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+## SUMMA
+def read_summa_base_settings():
+ pass
+
+
+def copy_summa_base_settings(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "SUMMA","1a_copy_base_settings", "1_copy_base_settings.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def create_summa_file_manager(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "SUMMA","1b_file_manager", "1_create_file_manager.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def create_summa_forcing_file_list(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "SUMMA","1c_forcing_file_list", "1_create_forcing_file_list.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def create_summa_cold_state(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "SUMMA","1d_initial_conditions", "1_create_coldState.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def create_summa_trial_parameters(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "SUMMA","1e_trial_parameters", "1_create_trialParams.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+# attributes
+
+
+def initialize_summa_attributes_nc(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "SUMMA","1f_attributes", "1_initialize_attributes_nc.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def insert_soilclass_from_hist_into_summa_attributes(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "SUMMA","1f_attributes", "2a_insert_soilclass_from_hist_into_attributes.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def insert_landclass_from_hist_into_summa_attributes(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "SUMMA","1f_attributes", "2b_insert_landclass_from_hist_into_attributes.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+def insert_elevation_from_hist_into_summa_attributes(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ python_file_to_run = os.path.join(
+ submodule_path, "5_model_input", "SUMMA","1f_attributes", "2c_insert_elevation_into_attributes.py"
+ )
+
+ exec_python_lwd(python_file_to_run)
+
+
+### 6 Model runs
+def run_summa(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "6_model_runs", "1_run_summa.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+def run_summa_as_array(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "6_model_runs", "1_run_summa_as_array.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+def run_mizuroute(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "6_model_runs", "2_run_mizuRoute.sh"
+ )
+
+ subprocess_lwd(file_to_run)
+
+
+### 7 Visualization
+
+
+def plot_mizuroute_and_summa_shapefiles(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "7_visualization", "1_mizuRoute_and_summa_shapefiles.ipynb"
+ )
+
+ run_jupyter_notebook(file_to_run)
+
+
+def plot_ERA5_download_coordinates_and_catchment_shapefile(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "7_visualization", "2_ERA5_download_coordinates_and_catchment_shapefile.ipynb"
+ )
+
+ run_jupyter_notebook(file_to_run)
+
+
+def plot_forcing_grid_vs_catchment_averaged(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "7_visualization", "3_forcing_grid_vs_catchment_averaged.ipynb"
+ )
+
+ run_jupyter_notebook(file_to_run)
+
+
+def plot_temperature_lapse_rates(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "7_visualization", "4_temperature_lapse_rates.ipynb"
+ )
+
+ run_jupyter_notebook(file_to_run)
+
+
+def plot_geospatial_parameters_to_model_elements(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "7_visualization", "5_geospatial_parameters_to_model_elements.ipynb"
+ )
+
+ run_jupyter_notebook(file_to_run)
+
+
+def plot_SWE_SM_ET_Q_per_GRU(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "7_visualization", "6_SWE_SM_ET_Q_per_GRU.ipynb"
+ )
+
+ run_jupyter_notebook(file_to_run)
+
+def plot_SWE_and_streamflow_per_HRU(submodule_path: str):
+ """[description]
+
+ :param submodule_path: path to the summaWorkflow_public repository.
+ :type submodule_path: str
+ """
+
+
+ file_to_run = os.path.join(
+ submodule_path, "7_visualization", "7_SWE_and_streamflow_per_HRU.ipynb"
+ )
+
+ run_jupyter_notebook(file_to_run)
+
+
+#%% test area
+mwah_sbmodule_folder = "/Users/ayx374/Documents/GitHub/forks/comphydShared_summa/submodules/summaWorkflow_public"
+
+# create_folder_structure(mwah_sbmodule_folder)
+# clone_summa_repo(mwah_sbmodule_folder)
+# clone_mizuroute_repo(mwah_sbmodule_folder)
+# merge_forcing(mwah_sbmodule_folder)
+# create_ERA5_shapefile(mwah_sbmodule_folder)
+#create_modis_virtual_dataset(mwah_sbmodule_folder)
diff --git a/dependencies/cwarhm-summa/.gitignore b/dependencies/cwarhm-summa/.gitignore
new file mode 100644
index 0000000..dd93839
--- /dev/null
+++ b/dependencies/cwarhm-summa/.gitignore
@@ -0,0 +1,14 @@
+# Logs and databases #
+######################
+slurm-*.out
+output*.out
+ERA5_createForcing_CONUS*.sh
+core.*
+
+# notebook stuff
+.ipynb_checkpoints
+
+# rtd
+rtd/build
+
+rtd/source/*.md
\ No newline at end of file
diff --git a/.readthedocs.yml b/dependencies/cwarhm-summa/.readthedocs.yml
similarity index 100%
rename from .readthedocs.yml
rename to dependencies/cwarhm-summa/.readthedocs.yml
diff --git a/0_control_files/README.md b/dependencies/cwarhm-summa/0_control_files/README.md
similarity index 100%
rename from 0_control_files/README.md
rename to dependencies/cwarhm-summa/0_control_files/README.md
diff --git a/0_control_files/control_Bow_at_Banff.txt b/dependencies/cwarhm-summa/0_control_files/control_Bow_at_Banff.txt
similarity index 100%
rename from 0_control_files/control_Bow_at_Banff.txt
rename to dependencies/cwarhm-summa/0_control_files/control_Bow_at_Banff.txt
diff --git a/dependencies/cwarhm-summa/0_control_files/control_Bow_at_Banff_test.txt b/dependencies/cwarhm-summa/0_control_files/control_Bow_at_Banff_test.txt
new file mode 100644
index 0000000..285d414
--- /dev/null
+++ b/dependencies/cwarhm-summa/0_control_files/control_Bow_at_Banff_test.txt
@@ -0,0 +1,230 @@
+# SUMMA workflow setting file.
+# Characters '|' and '#' are used as separators to find the actual setting values. Any text behind '|' is assumed to be part of the setting value, unless preceded by '#'.
+
+# Note on path specification
+# If deviating from default paths, a full path must be specified. E.g. '/home/user/non-default/path'
+
+
+# Modeling domain settings
+root_path | /Users/ayx374/Documents/project/chwarm_test_results2 # Root folder where data will be stored.
+domain_name | BowAtBanff # Used as part of the root folder name for the prepared data.
+
+
+# Shapefile settings - SUMMA catchment file
+catchment_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment'.
+catchment_shp_name | bow_distributed_elevation_zone.shp # Name of the catchment shapefile. Requires extension '.shp'.
+catchment_shp_gruid | GRU_ID # Name of the GRU ID column (can be any numeric value, HRU's within a single GRU have the same GRU ID).
+catchment_shp_hruid | HRU_ID # Name of the HRU ID column (consecutive from 1 to total number of HRUs, must be unique).
+catchment_shp_area | HRU_area # Name of the catchment area column. Area must be in units [m^2]
+catchment_shp_lat | center_lat # Name of the latitude column. Should be a value representative for the HRU. Typically the centroid.
+catchment_shp_lon | center_lon # Name of the longitude column. Should be a value representative for the HRU. Typically the centroid.
+
+
+# Shapefile settings - mizuRoute river network file
+river_network_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/river_network'.
+river_network_shp_name | bow_river_network_from_merit_hydro.shp # Name of the river network shapefile. Requires extension '.shp'.
+river_network_shp_segid | COMID # Name of the segment ID column.
+river_network_shp_downsegid | NextDownID # Name of the downstream segment ID column.
+river_network_shp_slope | slope # Name of the slope column. Slope must be in units [length/length].
+river_network_shp_length | length # Name of the segment length column. Length must be in units [m].
+
+
+# Shapefile settings - mizuRoute catchment file
+river_basin_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/river_basins'.
+river_basin_shp_name | bow_distributed.shp # Name of the routing subbasins shapefile needed for remapping. Requires extension '.shp'.
+river_basin_shp_rm_hruid | COMID # Name of the routing basin ID column.
+river_basin_shp_area | area # Name of the catchment area column. Area must be in units [m^2]
+river_basin_shp_hru_to_seg | hru_to_seg # Name of the column that shows which river segment each HRU connects to.
+
+
+# Shapefile settings - SUMMA-to-mizuRoute
+river_basin_needs_remap | yes # 'no' if routing basins map 1:1 onto model GRUs. 'yes' if river segments span multiple GRUs or if multiple segments are inside a single GRU.
+
+
+# Install settings
+github_summa | https://github.com/CH-Earth/summa # Replace this with the path to your own fork if you forked the repo.
+github_mizuroute | https://github.com/ncar/mizuroute # Replace this with the path to your own fork if you forked the repo.
+install_path_summa | default # If 'default', clones source code into 'root_path/installs/summa'.
+install_path_mizuroute | default # If 'default', clones source code into 'root_path/installs/mizuRoute'.
+exe_name_summa | summa.exe # Name of the compiled executable.
+exe_name_mizuroute | mizuroute.exe # Name of the compiled executable.
+
+
+# Forcing settings
+forcing_raw_time | 2008,2013 # Years to download: Jan-[from],Dec-[to].
+forcing_raw_space | 51.74/-116.55/50.95/-115.52 # Bounding box of the shapefile: lat_max/lon_min/lat_min/lon_max. Will be converted to ERA5 download coordinates in script. Order and use of '/' to separate values is mandatory.
+forcing_time_step_size | 3600 # Size of the forcing time step in [s]. Must be constant.
+forcing_measurement_height | 3 # Reference height for forcing measurements [m].
+forcing_shape_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/forcing'.
+forcing_shape_name | era5_grid.shp # Name of the forcing shapefile. Requires extension '.shp'.
+forcing_shape_lat_name | lat # Name of the latitude field that contains the latitude of ERA5 data points.
+forcing_shape_lon_name | lon # Name of the longitude field that contains the longitude of ERA5 data points.
+forcing_geo_path | default # If 'default', uses 'root_path/domain_[name]/forcing/0_geopotential'.
+forcing_raw_path | default # If 'default', uses 'root_path/domain_[name]/forcing/1_ERA5_raw_data'.
+forcing_merged_path | default # If 'default', uses 'root_path/domain_[name]/forcing/2_merged_data'.
+forcing_easymore_path | default # If 'default', uses 'root_path/domain_[name]/forcing/3_temp_easymore'.
+forcing_basin_avg_path | default # If 'default', uses 'root_path/domain_[name]/forcing/3_basin_averaged_data'.
+forcing_summa_path | default # If 'default', uses 'root_path/domain_[name]/forcing/4_SUMMA_input'.
+
+
+# Parameter settings - DEM
+parameter_dem_main_url | http://hydro.iis.u-tokyo.ac.jp/~yamadai/MERIT_Hydro/distribute/v1.0.1/ # Primary download URL for MERIT Hydro adjusted elevation data. Needs to be appended with filenames.
+parameter_dem_file_template | elv_{}{}.tar # Template for download file names.
+parameter_dem_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/1_MERIT_hydro_raw_data'.
+parameter_dem_unpack_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/2_MERIT_hydro_unpacked_data'.
+parameter_dem_vrt1_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/3_vrt'.
+parameter_dem_vrt2_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/4_domain_vrt'.
+parameter_dem_tif_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/5_elevation'.
+parameter_dem_tif_name | elevation.tif # Name of the final DEM for the domain. Must be in .tif format.
+
+
+# Parameter settings - soil
+parameter_soil_hydro_ID | 1361509511e44adfba814f6950c6e742 # ID of the Hydroshare resource to download.
+parameter_soil_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/soilclass/1_soil_classes_global'.
+parameter_soil_domain_path | default # If 'default', uses 'root_path/domain_[name]/parameters/soilclass/2_soil_classes_domain'.
+parameter_soil_tif_name | soil_classes.tif # Name of the final soil class overview for the domain. Must be in .tif format.
+
+
+# Parameter settings - land
+parameter_land_list_path | default # If 'default', uses 'summaWorkflow_public/3b_parameters/MODIS_MCD12Q1_V6/1_download/'. Location of file with data download links.
+parameter_land_list_name | daac_mcd12q1_data_links.txt # Name of file that contains list of MODIS download urls.
+parameter_land_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/1_MODIS_raw_data'.
+parameter_land_vrt1_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/2_vrt_native_crs'. Virtual dataset composed of .hdf files.
+parameter_land_vrt2_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/3_vrt_epsg_4326'. Virtual dataset projected in EPSG:4326.
+parameter_land_vrt3_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/4_domain_vrt_epsg_4326'. Virtual dataset cropped to model domain.
+parameter_land_vrt4_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/5_multiband_domain_vrt_epsg_4326'. Multiband cropped virtual dataset.
+parameter_land_tif_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/6_tif_multiband'.
+parameter_land_mode_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/7_mode_land_class'.
+parameter_land_tif_name | land_classes.tif # Name of the final landclass overview for the domain. Must be in .tif format.
+
+
+# Intersection settings
+intersect_dem_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_dem'.
+intersect_dem_name | catchment_with_merit_dem.shp # Name of the shapefile with intersection between catchment and MERIT Hydro DEM, stored in column 'elev_mean'.
+intersect_soil_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_soilgrids'.
+intersect_soil_name | catchment_with_soilgrids.shp # Name of the shapefile with intersection between catchment and SOILGRIDS-derived USDA soil classes, stored in columns 'USDA_{1,...n}'
+intersect_land_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_modis'.
+intersect_land_name | catchment_with_modis.shp # Name of the shapefile with intersection between catchment and MODIS-derived IGBP land classes, stored in columns 'IGBP_{1,...n}'
+intersect_forcing_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_forcing'.
+intersect_routing_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_routing'.
+intersect_routing_name | catchment_with_routing_basins.shp # Name of the shapefile with intersection between hydrologic model catchments and routing model catchments.
+
+
+# Experiment settings - general
+experiment_id | run1 # Descriptor of the modelling experiment; used as output folder name.
+experiment_time_start | default # Simulation start. If 'default', constructs this from 'forcing_raw_time' setting and uses all downloaded forcing data; e.g. '1979-01-01 00:00'.
+experiment_time_end | default # Simulation end. If 'default', constructs this from 'forcing_raw_time' setting and uses all downloaded forcing data; e.g. '1979-12-31 23:00'.
+experiment_output_summa | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/SUMMA'.
+experiment_output_mizuRoute | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/mizuRoute'.
+experiment_log_summa | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/SUMMA/SUMMA_logs'.
+experiment_log_mizuroute | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/mizuRoute/mizuRoute_logs'.
+experiment_backup_settings | yes # Flag to (not) create a copy of the model settings in the output folder; "no" or "yes". Copying settings may be undesirable if files are large.
+
+
+# Experiment settings - SUMMA
+settings_summa_path | default # If 'default', uses 'root_path/domain_[name]/settings/SUMMA'.
+settings_summa_filemanager | fileManager.txt # Name of the file with the SUMMA inputs.
+settings_summa_coldstate | coldState.nc # Name of the file with initial states.
+settings_summa_trialParams | trialParams.nc # Name of the file that can contain trial parameter values (note, can be empty of any actual parameter values but must be provided and must contain an 'hruId' variable).
+settings_summa_forcing_list | forcingFileList.txt # Name of the file that has the list of forcing files.
+settings_summa_attributes | attributes.nc # Name of the attributes file.
+settings_summa_connect_HRUs | no # Attribute setting: "no" or "yes". Tricky concept, see README in ./5_model_input/SUMMA/3f_attributes. If no; all HRUs modeled as independent columns (downHRUindex = 0). If yes; HRUs within each GRU are connected based on relative HRU elevation (highest = upstream, lowest = outlet).
+settings_summa_trialParam_n | 1 # Number of trial parameter specifications. Specify 0 if none are wanted (they can still be included in this file but won't be read).
+settings_summa_trialParam_1 | maxstep,900 # Name of trial parameter and value to assign. Value assumed to be float.
+
+
+# Experiment settings - mizuRoute
+settings_mizu_path | default # If 'default', uses 'root_path/domain_[name]/settings/mizuRoute'.
+settings_mizu_parameters | param.nml.default # Name of the routing parameters file.
+settings_mizu_topology | topology.nc # Name of the river network topology file.
+settings_mizu_remap | routing_remap.nc # Name of the optional catchment remapping file, for cases when SUMMA uses different catchments than mizuRoute.
+settings_mizu_control_file | mizuroute.control # Name of the control file.
+settings_mizu_routing_var | averageRoutedRunoff # Name of SUMMA output variable to use for routing.
+settings_mizu_routing_units | m/s # Units of the variable to be routed.
+settings_mizu_routing_dt | 3600 # Size of the routing time step [s].
+settings_mizu_output_freq | annual # Frequency with which mizuRoute generates new output files. Must be one of 'single', 'day', 'month', 'annual'.
+settings_mizu_output_vars | 0 # Routing output. '0' for both KWT and IRF; '1' IRF only; '2' KWT only.
+settings_mizu_within_basin | 0 # '0' (no) or '1' (IRF routing). Flag to enable within-basin routing by mizuRoute. Should be set to 0 if SUMMA is run with "subRouting" decision "timeDlay".
+settings_mizu_make_outlet | 71028585 # Segment ID or IDs that should be set as network outlet. Specify multiple IDs separated by commas: X,Y,Z. Specify no IDs as: n/a. Note that this can also be done in the network shapefile.
+
+
+# Postprocessing settings
+visualization_folder | default # If 'default', uses 'root_path/domain_[name]/visualization'.
+
+
+# Default folder structure
+# Example of the resulting folder structure in "root_path".
+# New domains will go into their own folder.
+
+- summWorkflow_data
+ |
+ |_ domain_BowAtBanff
+ | |
+ | |_ forcing
+ | | |_ 0_geopotential
+ | | |_ 1_raw_data
+ | | |_ 2_merged_data
+ | | |_ 3_basin_averaged_data
+ | | |_ 4_SUMMA_input
+ | |
+ | |_ parameters
+ | | |_ soilclass
+ | | | |_ 1_soil_classes_global
+ | | | |_ 2_soil_classes_domain
+ | | |
+ | | |_ landclass
+ | | | |_ 1_MODIS_raw_data
+ | | | |_ 2_vrt_native_crs
+ | | | |_ 3_vrt_epsg_4326
+ | | | |_ 4_domain_vrt_epsg_4326
+ | | | |_ 5_multiband_domain_vrt_epsg_4326
+ | | | |_ 6_tif_multiband
+ | | | |_ 7_mode_land_class
+ | | |
+ | | |_ dem
+ | | |_ 1_MERIT_hydro_raw_data
+ | | |_ 2_MERIT_hydro_unpacked_data
+ | | |_ 3_vrt
+ | | |_ 4_domain_vrt
+ | | |_ 5_elevation
+ | |
+ | |_ settings
+ | | |_ mizuRoute
+ | | |_ SUMMA
+ | |
+ | |_ shapefiles
+ | | |_ catchment
+ | | |_ catchment_intersection
+ | | | |_ with_dem
+ | | | |_ with_forcing
+ | | | |_ with_soil
+ | | | |_ with_veg
+ | | |_ forcing
+ | | |_ river_basins
+ | | |_ river_network
+ | |
+ | |_ simulations
+ | | |_run1
+ | | | |_ 0_settings_backup
+ | | | | |_ summa
+ | | | | |_ mizuRoute
+ | | | |_ summa
+ | | | | |_run_settings
+ | | | | |_SUMMA_logs
+ | | | |_ mizuRoute
+ | | | | |_run_settings
+ | | | | |_mizuRoute_logs
+ | | |_run2
+ | | |_ ...
+ | |
+ | |_ visualization
+ |
+ |_ domain_global
+ | |_ ...
+ |
+ |_ domain_northAmerica
+ | |_ ...
+ |
+ |_ installs
+ |_ mizuRoute
+ |_ SUMMA
diff --git a/dependencies/cwarhm-summa/0_control_files/control_active.txt b/dependencies/cwarhm-summa/0_control_files/control_active.txt
new file mode 100644
index 0000000..285d414
--- /dev/null
+++ b/dependencies/cwarhm-summa/0_control_files/control_active.txt
@@ -0,0 +1,230 @@
+# SUMMA workflow setting file.
+# Characters '|' and '#' are used as separators to find the actual setting values. Any text behind '|' is assumed to be part of the setting value, unless preceded by '#'.
+
+# Note on path specification
+# If deviating from default paths, a full path must be specified. E.g. '/home/user/non-default/path'
+
+
+# Modeling domain settings
+root_path | /Users/ayx374/Documents/project/chwarm_test_results2 # Root folder where data will be stored.
+domain_name | BowAtBanff # Used as part of the root folder name for the prepared data.
+
+
+# Shapefile settings - SUMMA catchment file
+catchment_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment'.
+catchment_shp_name | bow_distributed_elevation_zone.shp # Name of the catchment shapefile. Requires extension '.shp'.
+catchment_shp_gruid | GRU_ID # Name of the GRU ID column (can be any numeric value, HRU's within a single GRU have the same GRU ID).
+catchment_shp_hruid | HRU_ID # Name of the HRU ID column (consecutive from 1 to total number of HRUs, must be unique).
+catchment_shp_area | HRU_area # Name of the catchment area column. Area must be in units [m^2]
+catchment_shp_lat | center_lat # Name of the latitude column. Should be a value representative for the HRU. Typically the centroid.
+catchment_shp_lon | center_lon # Name of the longitude column. Should be a value representative for the HRU. Typically the centroid.
+
+
+# Shapefile settings - mizuRoute river network file
+river_network_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/river_network'.
+river_network_shp_name | bow_river_network_from_merit_hydro.shp # Name of the river network shapefile. Requires extension '.shp'.
+river_network_shp_segid | COMID # Name of the segment ID column.
+river_network_shp_downsegid | NextDownID # Name of the downstream segment ID column.
+river_network_shp_slope     | slope                                        # Name of the slope column. Slope must be in units [length/length].
+river_network_shp_length | length # Name of the segment length column. Length must be in units [m].
+
+
+# Shapefile settings - mizuRoute catchment file
+river_basin_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/river_basins'.
+river_basin_shp_name | bow_distributed.shp # Name of the routing subbasins shapefile needed for remapping. Requires extension '.shp'.
+river_basin_shp_rm_hruid | COMID # Name of the routing basin ID column.
+river_basin_shp_area | area # Name of the catchment area column. Area must be in units [m^2]
+river_basin_shp_hru_to_seg | hru_to_seg # Name of the column that shows which river segment each HRU connects to.
+
+
+# Shapefile settings - SUMMA-to-mizuRoute
+river_basin_needs_remap | yes # 'no' if routing basins map 1:1 onto model GRUs. 'yes' if river segments span multiple GRUs or if multiple segments are inside a single GRU.
+
+
+# Install settings
+github_summa | https://github.com/CH-Earth/summa # Replace this with the path to your own fork if you forked the repo.
+github_mizuroute | https://github.com/ncar/mizuroute # Replace this with the path to your own fork if you forked the repo.
+install_path_summa | default # If 'default', clones source code into 'root_path/installs/summa'.
+install_path_mizuroute | default # If 'default', clones source code into 'root_path/installs/mizuRoute'.
+exe_name_summa | summa.exe # Name of the compiled executable.
+exe_name_mizuroute | mizuroute.exe # Name of the compiled executable.
+
+
+# Forcing settings
+forcing_raw_time | 2008,2013 # Years to download: Jan-[from],Dec-[to].
+forcing_raw_space | 51.74/-116.55/50.95/-115.52 # Bounding box of the shapefile: lat_max/lon_min/lat_min/lon_max. Will be converted to ERA5 download coordinates in script. Order and use of '/' to separate values is mandatory.
+forcing_time_step_size | 3600 # Size of the forcing time step in [s]. Must be constant.
+forcing_measurement_height | 3 # Reference height for forcing measurements [m].
+forcing_shape_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/forcing'.
+forcing_shape_name | era5_grid.shp # Name of the forcing shapefile. Requires extension '.shp'.
+forcing_shape_lat_name | lat # Name of the latitude field that contains the latitude of ERA5 data points.
+forcing_shape_lon_name      | lon                        # Name of the longitude field that contains the longitude of ERA5 data points.
+forcing_geo_path | default # If 'default', uses 'root_path/domain_[name]/forcing/0_geopotential'.
+forcing_raw_path | default # If 'default', uses 'root_path/domain_[name]/forcing/1_ERA5_raw_data'.
+forcing_merged_path | default # If 'default', uses 'root_path/domain_[name]/forcing/2_merged_data'.
+forcing_easymore_path | default # If 'default', uses 'root_path/domain_[name]/forcing/3_temp_easymore'.
+forcing_basin_avg_path | default # If 'default', uses 'root_path/domain_[name]/forcing/3_basin_averaged_data'.
+forcing_summa_path | default # If 'default', uses 'root_path/domain_[name]/forcing/4_SUMMA_input'.
+
+
+# Parameter settings - DEM
+parameter_dem_main_url | http://hydro.iis.u-tokyo.ac.jp/~yamadai/MERIT_Hydro/distribute/v1.0.1/ # Primary download URL for MERIT Hydro adjusted elevation data. Needs to be appended with filenames.
+parameter_dem_file_template | elv_{}{}.tar # Template for download file names.
+parameter_dem_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/1_MERIT_hydro_raw_data'.
+parameter_dem_unpack_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/2_MERIT_hydro_unpacked_data'.
+parameter_dem_vrt1_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/3_vrt'.
+parameter_dem_vrt2_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/4_domain_vrt'.
+parameter_dem_tif_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/5_elevation'.
+parameter_dem_tif_name | elevation.tif # Name of the final DEM for the domain. Must be in .tif format.
+
+
+# Parameter settings - soil
+parameter_soil_hydro_ID | 1361509511e44adfba814f6950c6e742 # ID of the Hydroshare resource to download.
+parameter_soil_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/soilclass/1_soil_classes_global'.
+parameter_soil_domain_path | default # If 'default', uses 'root_path/domain_[name]/parameters/soilclass/2_soil_classes_domain'.
+parameter_soil_tif_name | soil_classes.tif # Name of the final soil class overview for the domain. Must be in .tif format.
+
+
+# Parameter settings - land
+parameter_land_list_path | default # If 'default', uses 'summaWorkflow_public/3b_parameters/MODIS_MCD12Q1_V6/1_download/'. Location of file with data download links.
+parameter_land_list_name | daac_mcd12q1_data_links.txt # Name of file that contains list of MODIS download urls.
+parameter_land_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/1_MODIS_raw_data'.
+parameter_land_vrt1_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/2_vrt_native_crs'. Virtual dataset composed of .hdf files.
+parameter_land_vrt2_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/3_vrt_epsg_4326'. Virtual dataset projected in EPSG:4326.
+parameter_land_vrt3_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/4_domain_vrt_epsg_4326'. Virtual dataset cropped to model domain.
+parameter_land_vrt4_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/5_multiband_domain_vrt_epsg_4326'. Multiband cropped virtual dataset.
+parameter_land_tif_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/6_tif_multiband'.
+parameter_land_mode_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/7_mode_land_class'.
+parameter_land_tif_name | land_classes.tif # Name of the final landclass overview for the domain. Must be in .tif format.
+
+
+# Intersection settings
+intersect_dem_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_dem'.
+intersect_dem_name | catchment_with_merit_dem.shp # Name of the shapefile with intersection between catchment and MERIT Hydro DEM, stored in column 'elev_mean'.
+intersect_soil_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_soilgrids'.
+intersect_soil_name | catchment_with_soilgrids.shp # Name of the shapefile with intersection between catchment and SOILGRIDS-derived USDA soil classes, stored in columns 'USDA_{1,...n}'
+intersect_land_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_modis'.
+intersect_land_name | catchment_with_modis.shp # Name of the shapefile with intersection between catchment and MODIS-derived IGBP land classes, stored in columns 'IGBP_{1,...n}'
+intersect_forcing_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_forcing'.
+intersect_routing_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_routing'.
+intersect_routing_name | catchment_with_routing_basins.shp # Name of the shapefile with intersection between hydrologic model catchments and routing model catchments.
+
+
+# Experiment settings - general
+experiment_id | run1 # Descriptor of the modelling experiment; used as output folder name.
+experiment_time_start | default # Simulation start. If 'default', constructs this from 'forcing_raw_time' setting and uses all downloaded forcing data; e.g. '1979-01-01 00:00'.
+experiment_time_end | default # Simulation end. If 'default', constructs this from 'forcing_raw_time' setting and uses all downloaded forcing data; e.g. '1979-12-31 23:00'.
+experiment_output_summa | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/SUMMA'.
+experiment_output_mizuRoute | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/mizuRoute'.
+experiment_log_summa | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/SUMMA/SUMMA_logs'.
+experiment_log_mizuroute | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/mizuRoute/mizuRoute_logs'.
+experiment_backup_settings | yes # Flag to (not) create a copy of the model settings in the output folder; "no" or "yes". Copying settings may be undesirable if files are large.
+
+
+# Experiment settings - SUMMA
+settings_summa_path | default # If 'default', uses 'root_path/domain_[name]/settings/SUMMA'.
+settings_summa_filemanager | fileManager.txt # Name of the file with the SUMMA inputs.
+settings_summa_coldstate     | coldState.nc       # Name of the file with initial states.
+settings_summa_trialParams | trialParams.nc # Name of the file that can contain trial parameter values (note, can be empty of any actual parameter values but must be provided and must contain an 'hruId' variable).
+settings_summa_forcing_list | forcingFileList.txt # Name of the file that has the list of forcing files.
+settings_summa_attributes | attributes.nc # Name of the attributes file.
+settings_summa_connect_HRUs | no # Attribute setting: "no" or "yes". Tricky concept, see README in ./5_model_input/SUMMA/3f_attributes. If no; all HRUs modeled as independent columns (downHRUindex = 0). If yes; HRUs within each GRU are connected based on relative HRU elevation (highest = upstream, lowest = outlet).
+settings_summa_trialParam_n | 1 # Number of trial parameter specifications. Specify 0 if none are wanted (they can still be included in this file but won't be read).
+settings_summa_trialParam_1 | maxstep,900 # Name of trial parameter and value to assign. Value assumed to be float.
+
+
+# Experiment settings - mizuRoute
+settings_mizu_path | default # If 'default', uses 'root_path/domain_[name]/settings/mizuRoute'.
+settings_mizu_parameters | param.nml.default # Name of the routing parameters file.
+settings_mizu_topology | topology.nc # Name of the river network topology file.
+settings_mizu_remap | routing_remap.nc # Name of the optional catchment remapping file, for cases when SUMMA uses different catchments than mizuRoute.
+settings_mizu_control_file | mizuroute.control # Name of the control file.
+settings_mizu_routing_var | averageRoutedRunoff # Name of SUMMA output variable to use for routing.
+settings_mizu_routing_units | m/s # Units of the variable to be routed.
+settings_mizu_routing_dt | 3600 # Size of the routing time step [s].
+settings_mizu_output_freq | annual # Frequency with which mizuRoute generates new output files. Must be one of 'single', 'day', 'month', 'annual'.
+settings_mizu_output_vars | 0 # Routing output. '0' for both KWT and IRF; '1' IRF only; '2' KWT only.
+settings_mizu_within_basin | 0 # '0' (no) or '1' (IRF routing). Flag to enable within-basin routing by mizuRoute. Should be set to 0 if SUMMA is run with "subRouting" decision "timeDlay".
+settings_mizu_make_outlet | 71028585 # Segment ID or IDs that should be set as network outlet. Specify multiple IDs separated by commas: X,Y,Z. Specify no IDs as: n/a. Note that this can also be done in the network shapefile.
+
+
+# Postprocessing settings
+visualization_folder | default # If 'default', uses 'root_path/domain_[name]/visualization'.
+
+
+# Default folder structure
+# Example of the resulting folder structure in "root_path".
+# New domains will go into their own folder.
+
+- summaWorkflow_data
+ |
+ |_ domain_BowAtBanff
+ | |
+ | |_ forcing
+ | | |_ 0_geopotential
+ | | |_ 1_raw_data
+ | | |_ 2_merged_data
+ | | |_ 3_basin_averaged_data
+ | | |_ 4_SUMMA_input
+ | |
+ | |_ parameters
+ | | |_ soilclass
+ | | | |_ 1_soil_classes_global
+ | | | |_ 2_soil_classes_domain
+ | | |
+ | | |_ landclass
+ | | | |_ 1_MODIS_raw_data
+ | | | |_ 2_vrt_native_crs
+ | | | |_ 3_vrt_epsg_4326
+ | | | |_ 4_domain_vrt_epsg_4326
+ | | | |_ 5_multiband_domain_vrt_epsg_4326
+ | | | |_ 6_tif_multiband
+ | | | |_ 7_mode_land_class
+ | | |
+ | | |_ dem
+ | | |_ 1_MERIT_hydro_raw_data
+ | | |_ 2_MERIT_hydro_unpacked_data
+ | | |_ 3_vrt
+ | | |_ 4_domain_vrt
+ | | |_ 5_elevation
+ | |
+ | |_ settings
+ | | |_ mizuRoute
+ | | |_ SUMMA
+ | |
+ | |_ shapefiles
+ | | |_ catchment
+ | | |_ catchment_intersection
+ | | | |_ with_dem
+ | | | |_ with_forcing
+ | | | |_ with_soil
+ | | | |_ with_veg
+ | | |_ forcing
+ | | |_ river_basins
+ | | |_ river_network
+ | |
+ | |_ simulations
+ | | |_run1
+ | | | |_ 0_settings_backup
+ | | | | |_ summa
+ | | | | |_ mizuRoute
+ | | | |_ summa
+ | | | | |_run_settings
+ | | | | |_SUMMA_logs
+ | | | |_ mizuRoute
+ | | | | |_run_settings
+ | | | | |_mizuRoute_logs
+ | | |_run2
+ | | |_ ...
+ | |
+ | |_ visualization
+ |
+ |_ domain_global
+ | |_ ...
+ |
+ |_ domain_northAmerica
+ | |_ ...
+ |
+ |_ installs
+ |_ mizuRoute
+ |_ SUMMA
diff --git a/0_example/README.md b/dependencies/cwarhm-summa/0_example/README.md
similarity index 100%
rename from 0_example/README.md
rename to dependencies/cwarhm-summa/0_example/README.md
diff --git a/0_example/shapefiles/catchment/_workflow_log/1_sort_catchment_shape.py b/dependencies/cwarhm-summa/0_example/shapefiles/catchment/_workflow_log/1_sort_catchment_shape.py
similarity index 100%
rename from 0_example/shapefiles/catchment/_workflow_log/1_sort_catchment_shape.py
rename to dependencies/cwarhm-summa/0_example/shapefiles/catchment/_workflow_log/1_sort_catchment_shape.py
diff --git a/0_example/shapefiles/catchment/_workflow_log/20210410_sort_shape.txt b/dependencies/cwarhm-summa/0_example/shapefiles/catchment/_workflow_log/20210410_sort_shape.txt
similarity index 100%
rename from 0_example/shapefiles/catchment/_workflow_log/20210410_sort_shape.txt
rename to dependencies/cwarhm-summa/0_example/shapefiles/catchment/_workflow_log/20210410_sort_shape.txt
diff --git a/0_example/shapefiles/catchment/bow_distributed_elevation_zone.cpg b/dependencies/cwarhm-summa/0_example/shapefiles/catchment/bow_distributed_elevation_zone.cpg
similarity index 100%
rename from 0_example/shapefiles/catchment/bow_distributed_elevation_zone.cpg
rename to dependencies/cwarhm-summa/0_example/shapefiles/catchment/bow_distributed_elevation_zone.cpg
diff --git a/0_example/shapefiles/catchment/bow_distributed_elevation_zone.dbf b/dependencies/cwarhm-summa/0_example/shapefiles/catchment/bow_distributed_elevation_zone.dbf
similarity index 100%
rename from 0_example/shapefiles/catchment/bow_distributed_elevation_zone.dbf
rename to dependencies/cwarhm-summa/0_example/shapefiles/catchment/bow_distributed_elevation_zone.dbf
diff --git a/0_example/shapefiles/catchment/bow_distributed_elevation_zone.prj b/dependencies/cwarhm-summa/0_example/shapefiles/catchment/bow_distributed_elevation_zone.prj
similarity index 100%
rename from 0_example/shapefiles/catchment/bow_distributed_elevation_zone.prj
rename to dependencies/cwarhm-summa/0_example/shapefiles/catchment/bow_distributed_elevation_zone.prj
diff --git a/0_example/shapefiles/catchment/bow_distributed_elevation_zone.shp b/dependencies/cwarhm-summa/0_example/shapefiles/catchment/bow_distributed_elevation_zone.shp
similarity index 100%
rename from 0_example/shapefiles/catchment/bow_distributed_elevation_zone.shp
rename to dependencies/cwarhm-summa/0_example/shapefiles/catchment/bow_distributed_elevation_zone.shp
diff --git a/0_example/shapefiles/catchment/bow_distributed_elevation_zone.shx b/dependencies/cwarhm-summa/0_example/shapefiles/catchment/bow_distributed_elevation_zone.shx
similarity index 100%
rename from 0_example/shapefiles/catchment/bow_distributed_elevation_zone.shx
rename to dependencies/cwarhm-summa/0_example/shapefiles/catchment/bow_distributed_elevation_zone.shx
diff --git a/0_example/shapefiles/river_basins/bow_distributed.cpg b/dependencies/cwarhm-summa/0_example/shapefiles/river_basins/bow_distributed.cpg
similarity index 100%
rename from 0_example/shapefiles/river_basins/bow_distributed.cpg
rename to dependencies/cwarhm-summa/0_example/shapefiles/river_basins/bow_distributed.cpg
diff --git a/0_example/shapefiles/river_basins/bow_distributed.dbf b/dependencies/cwarhm-summa/0_example/shapefiles/river_basins/bow_distributed.dbf
similarity index 100%
rename from 0_example/shapefiles/river_basins/bow_distributed.dbf
rename to dependencies/cwarhm-summa/0_example/shapefiles/river_basins/bow_distributed.dbf
diff --git a/0_example/shapefiles/river_basins/bow_distributed.prj b/dependencies/cwarhm-summa/0_example/shapefiles/river_basins/bow_distributed.prj
similarity index 100%
rename from 0_example/shapefiles/river_basins/bow_distributed.prj
rename to dependencies/cwarhm-summa/0_example/shapefiles/river_basins/bow_distributed.prj
diff --git a/0_example/shapefiles/river_basins/bow_distributed.shp b/dependencies/cwarhm-summa/0_example/shapefiles/river_basins/bow_distributed.shp
similarity index 100%
rename from 0_example/shapefiles/river_basins/bow_distributed.shp
rename to dependencies/cwarhm-summa/0_example/shapefiles/river_basins/bow_distributed.shp
diff --git a/0_example/shapefiles/river_basins/bow_distributed.shx b/dependencies/cwarhm-summa/0_example/shapefiles/river_basins/bow_distributed.shx
similarity index 100%
rename from 0_example/shapefiles/river_basins/bow_distributed.shx
rename to dependencies/cwarhm-summa/0_example/shapefiles/river_basins/bow_distributed.shx
diff --git a/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.cpg b/dependencies/cwarhm-summa/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.cpg
similarity index 100%
rename from 0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.cpg
rename to dependencies/cwarhm-summa/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.cpg
diff --git a/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.dbf b/dependencies/cwarhm-summa/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.dbf
similarity index 100%
rename from 0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.dbf
rename to dependencies/cwarhm-summa/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.dbf
diff --git a/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.prj b/dependencies/cwarhm-summa/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.prj
similarity index 100%
rename from 0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.prj
rename to dependencies/cwarhm-summa/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.prj
diff --git a/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.shp b/dependencies/cwarhm-summa/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.shp
similarity index 100%
rename from 0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.shp
rename to dependencies/cwarhm-summa/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.shp
diff --git a/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.shx b/dependencies/cwarhm-summa/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.shx
similarity index 100%
rename from 0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.shx
rename to dependencies/cwarhm-summa/0_example/shapefiles/river_network/bow_river_network_from_merit_hydro.shx
diff --git a/0_tools/ERA5_check_merged_forcing_values.ipynb b/dependencies/cwarhm-summa/0_tools/ERA5_check_merged_forcing_values.ipynb
similarity index 100%
rename from 0_tools/ERA5_check_merged_forcing_values.ipynb
rename to dependencies/cwarhm-summa/0_tools/ERA5_check_merged_forcing_values.ipynb
diff --git a/0_tools/ERA5_check_merged_forcing_values.py b/dependencies/cwarhm-summa/0_tools/ERA5_check_merged_forcing_values.py
similarity index 100%
rename from 0_tools/ERA5_check_merged_forcing_values.py
rename to dependencies/cwarhm-summa/0_tools/ERA5_check_merged_forcing_values.py
diff --git a/0_tools/ERA5_find_download_coordinates_from_shapefile.ipynb b/dependencies/cwarhm-summa/0_tools/ERA5_find_download_coordinates_from_shapefile.ipynb
similarity index 100%
rename from 0_tools/ERA5_find_download_coordinates_from_shapefile.ipynb
rename to dependencies/cwarhm-summa/0_tools/ERA5_find_download_coordinates_from_shapefile.ipynb
diff --git a/0_tools/MIZUROUTE_split_out_to_statistics.sh b/dependencies/cwarhm-summa/0_tools/MIZUROUTE_split_out_to_statistics.sh
similarity index 100%
rename from 0_tools/MIZUROUTE_split_out_to_statistics.sh
rename to dependencies/cwarhm-summa/0_tools/MIZUROUTE_split_out_to_statistics.sh
diff --git a/0_tools/README.md b/dependencies/cwarhm-summa/0_tools/README.md
similarity index 100%
rename from 0_tools/README.md
rename to dependencies/cwarhm-summa/0_tools/README.md
diff --git a/0_tools/SUMMA_concat_split_summa.py b/dependencies/cwarhm-summa/0_tools/SUMMA_concat_split_summa.py
similarity index 100%
rename from 0_tools/SUMMA_concat_split_summa.py
rename to dependencies/cwarhm-summa/0_tools/SUMMA_concat_split_summa.py
diff --git a/0_tools/SUMMA_merge_restarts_into_warmState.py b/dependencies/cwarhm-summa/0_tools/SUMMA_merge_restarts_into_warmState.py
similarity index 100%
rename from 0_tools/SUMMA_merge_restarts_into_warmState.py
rename to dependencies/cwarhm-summa/0_tools/SUMMA_merge_restarts_into_warmState.py
diff --git a/0_tools/SUMMA_plot_computational_times.py b/dependencies/cwarhm-summa/0_tools/SUMMA_plot_computational_times.py
similarity index 100%
rename from 0_tools/SUMMA_plot_computational_times.py
rename to dependencies/cwarhm-summa/0_tools/SUMMA_plot_computational_times.py
diff --git a/0_tools/SUMMA_split_out_to_mizuRoute_split_in.py b/dependencies/cwarhm-summa/0_tools/SUMMA_split_out_to_mizuRoute_split_in.py
similarity index 100%
rename from 0_tools/SUMMA_split_out_to_mizuRoute_split_in.py
rename to dependencies/cwarhm-summa/0_tools/SUMMA_split_out_to_mizuRoute_split_in.py
diff --git a/0_tools/SUMMA_split_out_to_mizuRoute_split_in.sh b/dependencies/cwarhm-summa/0_tools/SUMMA_split_out_to_mizuRoute_split_in.sh
similarity index 100%
rename from 0_tools/SUMMA_split_out_to_mizuRoute_split_in.sh
rename to dependencies/cwarhm-summa/0_tools/SUMMA_split_out_to_mizuRoute_split_in.sh
diff --git a/0_tools/SUMMA_summarize_logs.py b/dependencies/cwarhm-summa/0_tools/SUMMA_summarize_logs.py
similarity index 100%
rename from 0_tools/SUMMA_summarize_logs.py
rename to dependencies/cwarhm-summa/0_tools/SUMMA_summarize_logs.py
diff --git a/0_tools/SUMMA_timeseries_to_statistics_parallel.py b/dependencies/cwarhm-summa/0_tools/SUMMA_timeseries_to_statistics_parallel.py
similarity index 100%
rename from 0_tools/SUMMA_timeseries_to_statistics_parallel.py
rename to dependencies/cwarhm-summa/0_tools/SUMMA_timeseries_to_statistics_parallel.py
diff --git a/1_folder_prep/README.md b/dependencies/cwarhm-summa/1_folder_prep/README.md
similarity index 100%
rename from 1_folder_prep/README.md
rename to dependencies/cwarhm-summa/1_folder_prep/README.md
diff --git a/1_folder_prep/make_folder_structure.ipynb b/dependencies/cwarhm-summa/1_folder_prep/make_folder_structure.ipynb
similarity index 100%
rename from 1_folder_prep/make_folder_structure.ipynb
rename to dependencies/cwarhm-summa/1_folder_prep/make_folder_structure.ipynb
diff --git a/1_folder_prep/make_folder_structure.py b/dependencies/cwarhm-summa/1_folder_prep/make_folder_structure.py
similarity index 98%
rename from 1_folder_prep/make_folder_structure.py
rename to dependencies/cwarhm-summa/1_folder_prep/make_folder_structure.py
index e3a11af..fa7cd2d 100644
--- a/1_folder_prep/make_folder_structure.py
+++ b/dependencies/cwarhm-summa/1_folder_prep/make_folder_structure.py
@@ -13,7 +13,7 @@
'''
# Specify the control file to use
-sourceFile = 'control_Bow_at_Banff.txt'
+sourceFile = 'control_Bow_at_Banff_test.txt'
# --- Do not change below this line.
diff --git a/2_install/1a_clone_summa.sh b/dependencies/cwarhm-summa/2_install/1a_clone_summa.sh
similarity index 100%
rename from 2_install/1a_clone_summa.sh
rename to dependencies/cwarhm-summa/2_install/1a_clone_summa.sh
diff --git a/2_install/1b_compile_summa.sh b/dependencies/cwarhm-summa/2_install/1b_compile_summa.sh
similarity index 100%
rename from 2_install/1b_compile_summa.sh
rename to dependencies/cwarhm-summa/2_install/1b_compile_summa.sh
diff --git a/2_install/2a_clone_mizuroute.sh b/dependencies/cwarhm-summa/2_install/2a_clone_mizuroute.sh
similarity index 100%
rename from 2_install/2a_clone_mizuroute.sh
rename to dependencies/cwarhm-summa/2_install/2a_clone_mizuroute.sh
diff --git a/2_install/2b_compile_mizuroute.sh b/dependencies/cwarhm-summa/2_install/2b_compile_mizuroute.sh
similarity index 100%
rename from 2_install/2b_compile_mizuroute.sh
rename to dependencies/cwarhm-summa/2_install/2b_compile_mizuroute.sh
diff --git a/2_install/README.md b/dependencies/cwarhm-summa/2_install/README.md
similarity index 100%
rename from 2_install/README.md
rename to dependencies/cwarhm-summa/2_install/README.md
diff --git a/3a_forcing/1a_download_forcing/README.md b/dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/README.md
similarity index 100%
rename from 3a_forcing/1a_download_forcing/README.md
rename to dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/README.md
diff --git a/3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.ipynb b/dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.ipynb
similarity index 100%
rename from 3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.ipynb
rename to dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.ipynb
diff --git a/3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.py b/dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.py
similarity index 100%
rename from 3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.py
rename to dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/download_ERA5_pressureLevel_annual.py
diff --git a/3a_forcing/1a_download_forcing/download_ERA5_surfaceLevel_annual.ipynb b/dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/download_ERA5_surfaceLevel_annual.ipynb
similarity index 100%
rename from 3a_forcing/1a_download_forcing/download_ERA5_surfaceLevel_annual.ipynb
rename to dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/download_ERA5_surfaceLevel_annual.ipynb
diff --git a/3a_forcing/1a_download_forcing/download_ERA5_surfaceLevel_annual.py b/dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/download_ERA5_surfaceLevel_annual.py
similarity index 100%
rename from 3a_forcing/1a_download_forcing/download_ERA5_surfaceLevel_annual.py
rename to dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/download_ERA5_surfaceLevel_annual.py
diff --git a/3a_forcing/1a_download_forcing/run_download_ERA5_pressureLevel.sh b/dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/run_download_ERA5_pressureLevel.sh
similarity index 100%
rename from 3a_forcing/1a_download_forcing/run_download_ERA5_pressureLevel.sh
rename to dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/run_download_ERA5_pressureLevel.sh
diff --git a/3a_forcing/1a_download_forcing/run_download_ERA5_surfaceLevel.sh b/dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/run_download_ERA5_surfaceLevel.sh
similarity index 100%
rename from 3a_forcing/1a_download_forcing/run_download_ERA5_surfaceLevel.sh
rename to dependencies/cwarhm-summa/3a_forcing/1a_download_forcing/run_download_ERA5_surfaceLevel.sh
diff --git a/3a_forcing/1b_download_geopotential/README.md b/dependencies/cwarhm-summa/3a_forcing/1b_download_geopotential/README.md
similarity index 100%
rename from 3a_forcing/1b_download_geopotential/README.md
rename to dependencies/cwarhm-summa/3a_forcing/1b_download_geopotential/README.md
diff --git a/3a_forcing/1b_download_geopotential/download_ERA5_geopotential.ipynb b/dependencies/cwarhm-summa/3a_forcing/1b_download_geopotential/download_ERA5_geopotential.ipynb
similarity index 100%
rename from 3a_forcing/1b_download_geopotential/download_ERA5_geopotential.ipynb
rename to dependencies/cwarhm-summa/3a_forcing/1b_download_geopotential/download_ERA5_geopotential.ipynb
diff --git a/3a_forcing/1b_download_geopotential/download_ERA5_geopotential.py b/dependencies/cwarhm-summa/3a_forcing/1b_download_geopotential/download_ERA5_geopotential.py
similarity index 100%
rename from 3a_forcing/1b_download_geopotential/download_ERA5_geopotential.py
rename to dependencies/cwarhm-summa/3a_forcing/1b_download_geopotential/download_ERA5_geopotential.py
diff --git a/3a_forcing/2_merge_forcing/ERA5_surface_and_pressure_level_combiner.ipynb b/dependencies/cwarhm-summa/3a_forcing/2_merge_forcing/ERA5_surface_and_pressure_level_combiner.ipynb
similarity index 100%
rename from 3a_forcing/2_merge_forcing/ERA5_surface_and_pressure_level_combiner.ipynb
rename to dependencies/cwarhm-summa/3a_forcing/2_merge_forcing/ERA5_surface_and_pressure_level_combiner.ipynb
diff --git a/3a_forcing/2_merge_forcing/ERA5_surface_and_pressure_level_combiner.py b/dependencies/cwarhm-summa/3a_forcing/2_merge_forcing/ERA5_surface_and_pressure_level_combiner.py
similarity index 100%
rename from 3a_forcing/2_merge_forcing/ERA5_surface_and_pressure_level_combiner.py
rename to dependencies/cwarhm-summa/3a_forcing/2_merge_forcing/ERA5_surface_and_pressure_level_combiner.py
diff --git a/3a_forcing/2_merge_forcing/README.md b/dependencies/cwarhm-summa/3a_forcing/2_merge_forcing/README.md
similarity index 100%
rename from 3a_forcing/2_merge_forcing/README.md
rename to dependencies/cwarhm-summa/3a_forcing/2_merge_forcing/README.md
diff --git a/3a_forcing/3_create_shapefile/README.md b/dependencies/cwarhm-summa/3a_forcing/3_create_shapefile/README.md
similarity index 100%
rename from 3a_forcing/3_create_shapefile/README.md
rename to dependencies/cwarhm-summa/3a_forcing/3_create_shapefile/README.md
diff --git a/3a_forcing/3_create_shapefile/create_ERA5_shapefile.ipynb b/dependencies/cwarhm-summa/3a_forcing/3_create_shapefile/create_ERA5_shapefile.ipynb
similarity index 100%
rename from 3a_forcing/3_create_shapefile/create_ERA5_shapefile.ipynb
rename to dependencies/cwarhm-summa/3a_forcing/3_create_shapefile/create_ERA5_shapefile.ipynb
diff --git a/3a_forcing/3_create_shapefile/create_ERA5_shapefile.py b/dependencies/cwarhm-summa/3a_forcing/3_create_shapefile/create_ERA5_shapefile.py
similarity index 100%
rename from 3a_forcing/3_create_shapefile/create_ERA5_shapefile.py
rename to dependencies/cwarhm-summa/3a_forcing/3_create_shapefile/create_ERA5_shapefile.py
diff --git a/3a_forcing/README.md b/dependencies/cwarhm-summa/3a_forcing/README.md
similarity index 100%
rename from 3a_forcing/README.md
rename to dependencies/cwarhm-summa/3a_forcing/README.md
diff --git a/3b_parameters/MERIT_Hydro_DEM/1_download/README.md b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/1_download/README.md
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/1_download/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/1_download/README.md
diff --git a/3b_parameters/MERIT_Hydro_DEM/1_download/download_merit_hydro_adjusted_elevation.ipynb b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/1_download/download_merit_hydro_adjusted_elevation.ipynb
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/1_download/download_merit_hydro_adjusted_elevation.ipynb
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/1_download/download_merit_hydro_adjusted_elevation.ipynb
diff --git a/3b_parameters/MERIT_Hydro_DEM/1_download/download_merit_hydro_adjusted_elevation.py b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/1_download/download_merit_hydro_adjusted_elevation.py
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/1_download/download_merit_hydro_adjusted_elevation.py
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/1_download/download_merit_hydro_adjusted_elevation.py
diff --git a/3b_parameters/MERIT_Hydro_DEM/2_unpack/unpack_merit_hydro_dem.sh b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/2_unpack/unpack_merit_hydro_dem.sh
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/2_unpack/unpack_merit_hydro_dem.sh
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/2_unpack/unpack_merit_hydro_dem.sh
diff --git a/3b_parameters/MERIT_Hydro_DEM/3_create_vrt/README.md b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/3_create_vrt/README.md
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/3_create_vrt/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/3_create_vrt/README.md
diff --git a/3b_parameters/MERIT_Hydro_DEM/3_create_vrt/make_merit_dem_vrt.sh b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/3_create_vrt/make_merit_dem_vrt.sh
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/3_create_vrt/make_merit_dem_vrt.sh
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/3_create_vrt/make_merit_dem_vrt.sh
diff --git a/3b_parameters/MERIT_Hydro_DEM/4_specify_subdomain/README.md b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/4_specify_subdomain/README.md
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/4_specify_subdomain/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/4_specify_subdomain/README.md
diff --git a/3b_parameters/MERIT_Hydro_DEM/4_specify_subdomain/specify_subdomain.sh b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/4_specify_subdomain/specify_subdomain.sh
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/4_specify_subdomain/specify_subdomain.sh
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/4_specify_subdomain/specify_subdomain.sh
diff --git a/3b_parameters/MERIT_Hydro_DEM/5_convert_to_tif/README.md b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/5_convert_to_tif/README.md
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/5_convert_to_tif/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/5_convert_to_tif/README.md
diff --git a/3b_parameters/MERIT_Hydro_DEM/5_convert_to_tif/convert_vrt_to_tif.sh b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/5_convert_to_tif/convert_vrt_to_tif.sh
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/5_convert_to_tif/convert_vrt_to_tif.sh
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/5_convert_to_tif/convert_vrt_to_tif.sh
diff --git a/3b_parameters/MERIT_Hydro_DEM/README.md b/dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/README.md
similarity index 100%
rename from 3b_parameters/MERIT_Hydro_DEM/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MERIT_Hydro_DEM/README.md
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/1_download/README.md b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/1_download/README.md
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/1_download/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/1_download/README.md
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/1_download/daac_mcd12q1_data_links.txt b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/1_download/daac_mcd12q1_data_links.txt
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/1_download/daac_mcd12q1_data_links.txt
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/1_download/daac_mcd12q1_data_links.txt
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/1_download/download_modis_mcd12q1_v6.ipynb b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/1_download/download_modis_mcd12q1_v6.ipynb
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/1_download/download_modis_mcd12q1_v6.ipynb
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/1_download/download_modis_mcd12q1_v6.ipynb
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/1_download/download_modis_mcd12q1_v6.py b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/1_download/download_modis_mcd12q1_v6.py
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/1_download/download_modis_mcd12q1_v6.py
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/1_download/download_modis_mcd12q1_v6.py
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/2_create_vrt/README.md b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/2_create_vrt/README.md
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/2_create_vrt/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/2_create_vrt/README.md
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/2_create_vrt/make_vrt_per_year.sh b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/2_create_vrt/make_vrt_per_year.sh
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/2_create_vrt/make_vrt_per_year.sh
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/2_create_vrt/make_vrt_per_year.sh
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/3_reproject_vrt/README.md b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/3_reproject_vrt/README.md
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/3_reproject_vrt/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/3_reproject_vrt/README.md
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/3_reproject_vrt/reproject_vrt.sh b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/3_reproject_vrt/reproject_vrt.sh
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/3_reproject_vrt/reproject_vrt.sh
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/3_reproject_vrt/reproject_vrt.sh
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/4_specify_subdomain/README.md b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/4_specify_subdomain/README.md
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/4_specify_subdomain/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/4_specify_subdomain/README.md
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/4_specify_subdomain/specify_subdomain.sh b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/4_specify_subdomain/specify_subdomain.sh
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/4_specify_subdomain/specify_subdomain.sh
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/4_specify_subdomain/specify_subdomain.sh
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/5_multiband_vrt/README.md b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/5_multiband_vrt/README.md
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/5_multiband_vrt/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/5_multiband_vrt/README.md
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/5_multiband_vrt/create_multiband_vrt.sh b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/5_multiband_vrt/create_multiband_vrt.sh
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/5_multiband_vrt/create_multiband_vrt.sh
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/5_multiband_vrt/create_multiband_vrt.sh
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/6_convert_to_tif/README.md b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/6_convert_to_tif/README.md
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/6_convert_to_tif/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/6_convert_to_tif/README.md
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/6_convert_to_tif/convert_vrt_to_tif.sh b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/6_convert_to_tif/convert_vrt_to_tif.sh
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/6_convert_to_tif/convert_vrt_to_tif.sh
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/6_convert_to_tif/convert_vrt_to_tif.sh
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/README.md b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/README.md
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/README.md
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/find_mode_landclass.ipynb b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/find_mode_landclass.ipynb
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/find_mode_landclass.ipynb
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/find_mode_landclass.ipynb
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/find_mode_landclass.py b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/find_mode_landclass.py
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/find_mode_landclass.py
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/7_find_mode_land_class/find_mode_landclass.py
diff --git a/3b_parameters/MODIS_MCD12Q1_V6/README.md b/dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/README.md
similarity index 100%
rename from 3b_parameters/MODIS_MCD12Q1_V6/README.md
rename to dependencies/cwarhm-summa/3b_parameters/MODIS_MCD12Q1_V6/README.md
diff --git a/3b_parameters/README.md b/dependencies/cwarhm-summa/3b_parameters/README.md
similarity index 100%
rename from 3b_parameters/README.md
rename to dependencies/cwarhm-summa/3b_parameters/README.md
diff --git a/3b_parameters/SOILGRIDS/1_download/README.md b/dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/1_download/README.md
similarity index 100%
rename from 3b_parameters/SOILGRIDS/1_download/README.md
rename to dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/1_download/README.md
diff --git a/3b_parameters/SOILGRIDS/1_download/download_soilclass_global_map.ipynb b/dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/1_download/download_soilclass_global_map.ipynb
similarity index 100%
rename from 3b_parameters/SOILGRIDS/1_download/download_soilclass_global_map.ipynb
rename to dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/1_download/download_soilclass_global_map.ipynb
diff --git a/3b_parameters/SOILGRIDS/1_download/download_soilclass_global_map.py b/dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/1_download/download_soilclass_global_map.py
similarity index 100%
rename from 3b_parameters/SOILGRIDS/1_download/download_soilclass_global_map.py
rename to dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/1_download/download_soilclass_global_map.py
diff --git a/3b_parameters/SOILGRIDS/2_extract_domain/README.md b/dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/2_extract_domain/README.md
similarity index 100%
rename from 3b_parameters/SOILGRIDS/2_extract_domain/README.md
rename to dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/2_extract_domain/README.md
diff --git a/3b_parameters/SOILGRIDS/2_extract_domain/extract_domain.ipynb b/dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/2_extract_domain/extract_domain.ipynb
similarity index 100%
rename from 3b_parameters/SOILGRIDS/2_extract_domain/extract_domain.ipynb
rename to dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/2_extract_domain/extract_domain.ipynb
diff --git a/3b_parameters/SOILGRIDS/2_extract_domain/extract_domain.py b/dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/2_extract_domain/extract_domain.py
similarity index 100%
rename from 3b_parameters/SOILGRIDS/2_extract_domain/extract_domain.py
rename to dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/2_extract_domain/extract_domain.py
diff --git a/3b_parameters/SOILGRIDS/README.md b/dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/README.md
similarity index 100%
rename from 3b_parameters/SOILGRIDS/README.md
rename to dependencies/cwarhm-summa/3b_parameters/SOILGRIDS/README.md
diff --git a/4a_sort_shape/1_sort_catchment_shape.ipynb b/dependencies/cwarhm-summa/4a_sort_shape/1_sort_catchment_shape.ipynb
similarity index 100%
rename from 4a_sort_shape/1_sort_catchment_shape.ipynb
rename to dependencies/cwarhm-summa/4a_sort_shape/1_sort_catchment_shape.ipynb
diff --git a/4a_sort_shape/1_sort_catchment_shape.py b/dependencies/cwarhm-summa/4a_sort_shape/1_sort_catchment_shape.py
similarity index 100%
rename from 4a_sort_shape/1_sort_catchment_shape.py
rename to dependencies/cwarhm-summa/4a_sort_shape/1_sort_catchment_shape.py
diff --git a/4a_sort_shape/README.md b/dependencies/cwarhm-summa/4a_sort_shape/README.md
similarity index 100%
rename from 4a_sort_shape/README.md
rename to dependencies/cwarhm-summa/4a_sort_shape/README.md
diff --git a/4b_remapping/1_topo/1_find_HRU_elevation.ipynb b/dependencies/cwarhm-summa/4b_remapping/1_topo/1_find_HRU_elevation.ipynb
similarity index 100%
rename from 4b_remapping/1_topo/1_find_HRU_elevation.ipynb
rename to dependencies/cwarhm-summa/4b_remapping/1_topo/1_find_HRU_elevation.ipynb
diff --git a/4b_remapping/1_topo/1_find_HRU_elevation.py b/dependencies/cwarhm-summa/4b_remapping/1_topo/1_find_HRU_elevation.py
similarity index 99%
rename from 4b_remapping/1_topo/1_find_HRU_elevation.py
rename to dependencies/cwarhm-summa/4b_remapping/1_topo/1_find_HRU_elevation.py
index ccd917e..4d5fc48 100644
--- a/4b_remapping/1_topo/1_find_HRU_elevation.py
+++ b/dependencies/cwarhm-summa/4b_remapping/1_topo/1_find_HRU_elevation.py
@@ -147,7 +147,7 @@ def make_default_path(suffix):
stats=QgsZonalStatistics.Mean).calculateStatistics(None)
# Clean memory
-qgs.exitQgis()
+#qgs.exitQgis()
# --- Code provenance
diff --git a/4b_remapping/1_topo/2_find_HRU_soil_classes.ipynb b/dependencies/cwarhm-summa/4b_remapping/1_topo/2_find_HRU_soil_classes.ipynb
similarity index 100%
rename from 4b_remapping/1_topo/2_find_HRU_soil_classes.ipynb
rename to dependencies/cwarhm-summa/4b_remapping/1_topo/2_find_HRU_soil_classes.ipynb
diff --git a/4b_remapping/1_topo/2_find_HRU_soil_classes.py b/dependencies/cwarhm-summa/4b_remapping/1_topo/2_find_HRU_soil_classes.py
similarity index 100%
rename from 4b_remapping/1_topo/2_find_HRU_soil_classes.py
rename to dependencies/cwarhm-summa/4b_remapping/1_topo/2_find_HRU_soil_classes.py
diff --git a/4b_remapping/1_topo/3_find_HRU_land_classes.ipynb b/dependencies/cwarhm-summa/4b_remapping/1_topo/3_find_HRU_land_classes.ipynb
similarity index 100%
rename from 4b_remapping/1_topo/3_find_HRU_land_classes.ipynb
rename to dependencies/cwarhm-summa/4b_remapping/1_topo/3_find_HRU_land_classes.ipynb
diff --git a/4b_remapping/1_topo/3_find_HRU_land_classes.py b/dependencies/cwarhm-summa/4b_remapping/1_topo/3_find_HRU_land_classes.py
similarity index 100%
rename from 4b_remapping/1_topo/3_find_HRU_land_classes.py
rename to dependencies/cwarhm-summa/4b_remapping/1_topo/3_find_HRU_land_classes.py
diff --git a/4b_remapping/1_topo/README.md b/dependencies/cwarhm-summa/4b_remapping/1_topo/README.md
similarity index 100%
rename from 4b_remapping/1_topo/README.md
rename to dependencies/cwarhm-summa/4b_remapping/1_topo/README.md
diff --git a/4b_remapping/2_forcing/1_make_one_weighted_forcing_file.ipynb b/dependencies/cwarhm-summa/4b_remapping/2_forcing/1_make_one_weighted_forcing_file.ipynb
similarity index 100%
rename from 4b_remapping/2_forcing/1_make_one_weighted_forcing_file.ipynb
rename to dependencies/cwarhm-summa/4b_remapping/2_forcing/1_make_one_weighted_forcing_file.ipynb
diff --git a/4b_remapping/2_forcing/1_make_one_weighted_forcing_file.py b/dependencies/cwarhm-summa/4b_remapping/2_forcing/1_make_one_weighted_forcing_file.py
similarity index 100%
rename from 4b_remapping/2_forcing/1_make_one_weighted_forcing_file.py
rename to dependencies/cwarhm-summa/4b_remapping/2_forcing/1_make_one_weighted_forcing_file.py
diff --git a/4b_remapping/2_forcing/2_make_all_weighted_forcing_files.ipynb b/dependencies/cwarhm-summa/4b_remapping/2_forcing/2_make_all_weighted_forcing_files.ipynb
similarity index 100%
rename from 4b_remapping/2_forcing/2_make_all_weighted_forcing_files.ipynb
rename to dependencies/cwarhm-summa/4b_remapping/2_forcing/2_make_all_weighted_forcing_files.ipynb
diff --git a/4b_remapping/2_forcing/2_make_all_weighted_forcing_files.py b/dependencies/cwarhm-summa/4b_remapping/2_forcing/2_make_all_weighted_forcing_files.py
similarity index 100%
rename from 4b_remapping/2_forcing/2_make_all_weighted_forcing_files.py
rename to dependencies/cwarhm-summa/4b_remapping/2_forcing/2_make_all_weighted_forcing_files.py
diff --git a/4b_remapping/2_forcing/3_temperature_lapsing_and_datastep.ipynb b/dependencies/cwarhm-summa/4b_remapping/2_forcing/3_temperature_lapsing_and_datastep.ipynb
similarity index 100%
rename from 4b_remapping/2_forcing/3_temperature_lapsing_and_datastep.ipynb
rename to dependencies/cwarhm-summa/4b_remapping/2_forcing/3_temperature_lapsing_and_datastep.ipynb
diff --git a/4b_remapping/2_forcing/3_temperature_lapsing_and_datastep.py b/dependencies/cwarhm-summa/4b_remapping/2_forcing/3_temperature_lapsing_and_datastep.py
similarity index 100%
rename from 4b_remapping/2_forcing/3_temperature_lapsing_and_datastep.py
rename to dependencies/cwarhm-summa/4b_remapping/2_forcing/3_temperature_lapsing_and_datastep.py
diff --git a/4b_remapping/2_forcing/README.md b/dependencies/cwarhm-summa/4b_remapping/2_forcing/README.md
similarity index 100%
rename from 4b_remapping/2_forcing/README.md
rename to dependencies/cwarhm-summa/4b_remapping/2_forcing/README.md
diff --git a/4b_remapping/README.md b/dependencies/cwarhm-summa/4b_remapping/README.md
similarity index 100%
rename from 4b_remapping/README.md
rename to dependencies/cwarhm-summa/4b_remapping/README.md
diff --git a/5_model_input/README.md b/dependencies/cwarhm-summa/5_model_input/README.md
similarity index 100%
rename from 5_model_input/README.md
rename to dependencies/cwarhm-summa/5_model_input/README.md
diff --git a/5_model_input/SUMMA/0_base_settings/README.md b/dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/README.md
similarity index 100%
rename from 5_model_input/SUMMA/0_base_settings/README.md
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/README.md
diff --git a/5_model_input/SUMMA/0_base_settings/TBL_GENPARM.TBL b/dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/TBL_GENPARM.TBL
similarity index 100%
rename from 5_model_input/SUMMA/0_base_settings/TBL_GENPARM.TBL
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/TBL_GENPARM.TBL
diff --git a/5_model_input/SUMMA/0_base_settings/TBL_MPTABLE.TBL b/dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/TBL_MPTABLE.TBL
similarity index 100%
rename from 5_model_input/SUMMA/0_base_settings/TBL_MPTABLE.TBL
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/TBL_MPTABLE.TBL
diff --git a/5_model_input/SUMMA/0_base_settings/TBL_SOILPARM.TBL b/dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/TBL_SOILPARM.TBL
similarity index 100%
rename from 5_model_input/SUMMA/0_base_settings/TBL_SOILPARM.TBL
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/TBL_SOILPARM.TBL
diff --git a/5_model_input/SUMMA/0_base_settings/TBL_VEGPARM.TBL b/dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/TBL_VEGPARM.TBL
similarity index 100%
rename from 5_model_input/SUMMA/0_base_settings/TBL_VEGPARM.TBL
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/TBL_VEGPARM.TBL
diff --git a/5_model_input/SUMMA/0_base_settings/basinParamInfo.txt b/dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/basinParamInfo.txt
similarity index 100%
rename from 5_model_input/SUMMA/0_base_settings/basinParamInfo.txt
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/basinParamInfo.txt
diff --git a/5_model_input/SUMMA/0_base_settings/localParamInfo.txt b/dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/localParamInfo.txt
similarity index 100%
rename from 5_model_input/SUMMA/0_base_settings/localParamInfo.txt
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/localParamInfo.txt
diff --git a/5_model_input/SUMMA/0_base_settings/modelDecisions.txt b/dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/modelDecisions.txt
similarity index 100%
rename from 5_model_input/SUMMA/0_base_settings/modelDecisions.txt
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/modelDecisions.txt
diff --git a/5_model_input/SUMMA/0_base_settings/outputControl.txt b/dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/outputControl.txt
similarity index 100%
rename from 5_model_input/SUMMA/0_base_settings/outputControl.txt
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/0_base_settings/outputControl.txt
diff --git a/5_model_input/SUMMA/1a_copy_base_settings/1_copy_base_settings.ipynb b/dependencies/cwarhm-summa/5_model_input/SUMMA/1a_copy_base_settings/1_copy_base_settings.ipynb
similarity index 100%
rename from 5_model_input/SUMMA/1a_copy_base_settings/1_copy_base_settings.ipynb
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1a_copy_base_settings/1_copy_base_settings.ipynb
diff --git a/5_model_input/SUMMA/1a_copy_base_settings/1_copy_base_settings.py b/dependencies/cwarhm-summa/5_model_input/SUMMA/1a_copy_base_settings/1_copy_base_settings.py
similarity index 100%
rename from 5_model_input/SUMMA/1a_copy_base_settings/1_copy_base_settings.py
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1a_copy_base_settings/1_copy_base_settings.py
diff --git a/5_model_input/SUMMA/1a_copy_base_settings/README.md b/dependencies/cwarhm-summa/5_model_input/SUMMA/1a_copy_base_settings/README.md
similarity index 100%
rename from 5_model_input/SUMMA/1a_copy_base_settings/README.md
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1a_copy_base_settings/README.md
diff --git a/5_model_input/SUMMA/1b_file_manager/1_create_file_manager.ipynb b/dependencies/cwarhm-summa/5_model_input/SUMMA/1b_file_manager/1_create_file_manager.ipynb
similarity index 100%
rename from 5_model_input/SUMMA/1b_file_manager/1_create_file_manager.ipynb
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1b_file_manager/1_create_file_manager.ipynb
diff --git a/5_model_input/SUMMA/1b_file_manager/1_create_file_manager.py b/dependencies/cwarhm-summa/5_model_input/SUMMA/1b_file_manager/1_create_file_manager.py
similarity index 100%
rename from 5_model_input/SUMMA/1b_file_manager/1_create_file_manager.py
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1b_file_manager/1_create_file_manager.py
diff --git a/5_model_input/SUMMA/1b_file_manager/README.md b/dependencies/cwarhm-summa/5_model_input/SUMMA/1b_file_manager/README.md
similarity index 100%
rename from 5_model_input/SUMMA/1b_file_manager/README.md
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1b_file_manager/README.md
diff --git a/5_model_input/SUMMA/1c_forcing_file_list/1_create_forcing_file_list.ipynb b/dependencies/cwarhm-summa/5_model_input/SUMMA/1c_forcing_file_list/1_create_forcing_file_list.ipynb
similarity index 100%
rename from 5_model_input/SUMMA/1c_forcing_file_list/1_create_forcing_file_list.ipynb
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1c_forcing_file_list/1_create_forcing_file_list.ipynb
diff --git a/5_model_input/SUMMA/1c_forcing_file_list/1_create_forcing_file_list.py b/dependencies/cwarhm-summa/5_model_input/SUMMA/1c_forcing_file_list/1_create_forcing_file_list.py
similarity index 100%
rename from 5_model_input/SUMMA/1c_forcing_file_list/1_create_forcing_file_list.py
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1c_forcing_file_list/1_create_forcing_file_list.py
diff --git a/5_model_input/SUMMA/1c_forcing_file_list/README.md b/dependencies/cwarhm-summa/5_model_input/SUMMA/1c_forcing_file_list/README.md
similarity index 100%
rename from 5_model_input/SUMMA/1c_forcing_file_list/README.md
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1c_forcing_file_list/README.md
diff --git a/5_model_input/SUMMA/1d_initial_conditions/1_create_coldState.ipynb b/dependencies/cwarhm-summa/5_model_input/SUMMA/1d_initial_conditions/1_create_coldState.ipynb
similarity index 100%
rename from 5_model_input/SUMMA/1d_initial_conditions/1_create_coldState.ipynb
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1d_initial_conditions/1_create_coldState.ipynb
diff --git a/5_model_input/SUMMA/1d_initial_conditions/1_create_coldState.py b/dependencies/cwarhm-summa/5_model_input/SUMMA/1d_initial_conditions/1_create_coldState.py
similarity index 100%
rename from 5_model_input/SUMMA/1d_initial_conditions/1_create_coldState.py
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1d_initial_conditions/1_create_coldState.py
diff --git a/5_model_input/SUMMA/1d_initial_conditions/README.md b/dependencies/cwarhm-summa/5_model_input/SUMMA/1d_initial_conditions/README.md
similarity index 100%
rename from 5_model_input/SUMMA/1d_initial_conditions/README.md
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1d_initial_conditions/README.md
diff --git a/5_model_input/SUMMA/1e_trial_parameters/1_create_trialParams.ipynb b/dependencies/cwarhm-summa/5_model_input/SUMMA/1e_trial_parameters/1_create_trialParams.ipynb
similarity index 100%
rename from 5_model_input/SUMMA/1e_trial_parameters/1_create_trialParams.ipynb
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1e_trial_parameters/1_create_trialParams.ipynb
diff --git a/5_model_input/SUMMA/1e_trial_parameters/1_create_trialParams.py b/dependencies/cwarhm-summa/5_model_input/SUMMA/1e_trial_parameters/1_create_trialParams.py
similarity index 100%
rename from 5_model_input/SUMMA/1e_trial_parameters/1_create_trialParams.py
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1e_trial_parameters/1_create_trialParams.py
diff --git a/5_model_input/SUMMA/1e_trial_parameters/README.md b/dependencies/cwarhm-summa/5_model_input/SUMMA/1e_trial_parameters/README.md
similarity index 100%
rename from 5_model_input/SUMMA/1e_trial_parameters/README.md
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1e_trial_parameters/README.md
diff --git a/5_model_input/SUMMA/1f_attributes/1_initialize_attributes_nc.ipynb b/dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/1_initialize_attributes_nc.ipynb
similarity index 100%
rename from 5_model_input/SUMMA/1f_attributes/1_initialize_attributes_nc.ipynb
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/1_initialize_attributes_nc.ipynb
diff --git a/5_model_input/SUMMA/1f_attributes/1_initialize_attributes_nc.py b/dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/1_initialize_attributes_nc.py
similarity index 100%
rename from 5_model_input/SUMMA/1f_attributes/1_initialize_attributes_nc.py
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/1_initialize_attributes_nc.py
diff --git a/5_model_input/SUMMA/1f_attributes/2a_insert_soilclass_from_hist_into_attributes.ipynb b/dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2a_insert_soilclass_from_hist_into_attributes.ipynb
similarity index 100%
rename from 5_model_input/SUMMA/1f_attributes/2a_insert_soilclass_from_hist_into_attributes.ipynb
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2a_insert_soilclass_from_hist_into_attributes.ipynb
diff --git a/5_model_input/SUMMA/1f_attributes/2a_insert_soilclass_from_hist_into_attributes.py b/dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2a_insert_soilclass_from_hist_into_attributes.py
similarity index 100%
rename from 5_model_input/SUMMA/1f_attributes/2a_insert_soilclass_from_hist_into_attributes.py
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2a_insert_soilclass_from_hist_into_attributes.py
diff --git a/5_model_input/SUMMA/1f_attributes/2b_insert_landclass_from_hist_into_attributes.ipynb b/dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2b_insert_landclass_from_hist_into_attributes.ipynb
similarity index 100%
rename from 5_model_input/SUMMA/1f_attributes/2b_insert_landclass_from_hist_into_attributes.ipynb
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2b_insert_landclass_from_hist_into_attributes.ipynb
diff --git a/5_model_input/SUMMA/1f_attributes/2b_insert_landclass_from_hist_into_attributes.py b/dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2b_insert_landclass_from_hist_into_attributes.py
similarity index 100%
rename from 5_model_input/SUMMA/1f_attributes/2b_insert_landclass_from_hist_into_attributes.py
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2b_insert_landclass_from_hist_into_attributes.py
diff --git a/5_model_input/SUMMA/1f_attributes/2c_insert_elevation_into_attributes.ipynb b/dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2c_insert_elevation_into_attributes.ipynb
similarity index 100%
rename from 5_model_input/SUMMA/1f_attributes/2c_insert_elevation_into_attributes.ipynb
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2c_insert_elevation_into_attributes.ipynb
diff --git a/5_model_input/SUMMA/1f_attributes/2c_insert_elevation_into_attributes.py b/dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2c_insert_elevation_into_attributes.py
similarity index 100%
rename from 5_model_input/SUMMA/1f_attributes/2c_insert_elevation_into_attributes.py
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/2c_insert_elevation_into_attributes.py
diff --git a/5_model_input/SUMMA/1f_attributes/README.md b/dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/README.md
similarity index 100%
rename from 5_model_input/SUMMA/1f_attributes/README.md
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/1f_attributes/README.md
diff --git a/5_model_input/SUMMA/README.md b/dependencies/cwarhm-summa/5_model_input/SUMMA/README.md
similarity index 100%
rename from 5_model_input/SUMMA/README.md
rename to dependencies/cwarhm-summa/5_model_input/SUMMA/README.md
diff --git a/5_model_input/mizuRoute/0_base_settings/README.md b/dependencies/cwarhm-summa/5_model_input/mizuRoute/0_base_settings/README.md
similarity index 100%
rename from 5_model_input/mizuRoute/0_base_settings/README.md
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/0_base_settings/README.md
diff --git a/5_model_input/mizuRoute/0_base_settings/param.nml.default b/dependencies/cwarhm-summa/5_model_input/mizuRoute/0_base_settings/param.nml.default
similarity index 100%
rename from 5_model_input/mizuRoute/0_base_settings/param.nml.default
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/0_base_settings/param.nml.default
diff --git a/5_model_input/mizuRoute/1a_copy_base_settings/1_copy_base_settings.ipynb b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1a_copy_base_settings/1_copy_base_settings.ipynb
similarity index 100%
rename from 5_model_input/mizuRoute/1a_copy_base_settings/1_copy_base_settings.ipynb
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1a_copy_base_settings/1_copy_base_settings.ipynb
diff --git a/5_model_input/mizuRoute/1a_copy_base_settings/1_copy_base_settings.py b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1a_copy_base_settings/1_copy_base_settings.py
similarity index 100%
rename from 5_model_input/mizuRoute/1a_copy_base_settings/1_copy_base_settings.py
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1a_copy_base_settings/1_copy_base_settings.py
diff --git a/5_model_input/mizuRoute/1a_copy_base_settings/README.md b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1a_copy_base_settings/README.md
similarity index 100%
rename from 5_model_input/mizuRoute/1a_copy_base_settings/README.md
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1a_copy_base_settings/README.md
diff --git a/5_model_input/mizuRoute/1b_network_topology_file/1_create_network_topology_file.ipynb b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1b_network_topology_file/1_create_network_topology_file.ipynb
similarity index 100%
rename from 5_model_input/mizuRoute/1b_network_topology_file/1_create_network_topology_file.ipynb
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1b_network_topology_file/1_create_network_topology_file.ipynb
diff --git a/5_model_input/mizuRoute/1b_network_topology_file/1_create_network_topology_file.py b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1b_network_topology_file/1_create_network_topology_file.py
similarity index 100%
rename from 5_model_input/mizuRoute/1b_network_topology_file/1_create_network_topology_file.py
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1b_network_topology_file/1_create_network_topology_file.py
diff --git a/5_model_input/mizuRoute/1b_network_topology_file/README.md b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1b_network_topology_file/README.md
similarity index 100%
rename from 5_model_input/mizuRoute/1b_network_topology_file/README.md
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1b_network_topology_file/README.md
diff --git a/5_model_input/mizuRoute/1c_optional_remapping_file/1_remap_summa_catchments_to_routing.ipynb b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1c_optional_remapping_file/1_remap_summa_catchments_to_routing.ipynb
similarity index 100%
rename from 5_model_input/mizuRoute/1c_optional_remapping_file/1_remap_summa_catchments_to_routing.ipynb
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1c_optional_remapping_file/1_remap_summa_catchments_to_routing.ipynb
diff --git a/5_model_input/mizuRoute/1c_optional_remapping_file/1_remap_summa_catchments_to_routing.py b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1c_optional_remapping_file/1_remap_summa_catchments_to_routing.py
similarity index 100%
rename from 5_model_input/mizuRoute/1c_optional_remapping_file/1_remap_summa_catchments_to_routing.py
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1c_optional_remapping_file/1_remap_summa_catchments_to_routing.py
diff --git a/5_model_input/mizuRoute/1c_optional_remapping_file/README.md b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1c_optional_remapping_file/README.md
similarity index 100%
rename from 5_model_input/mizuRoute/1c_optional_remapping_file/README.md
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1c_optional_remapping_file/README.md
diff --git a/5_model_input/mizuRoute/1d_control_file/1_create_control_file.ipynb b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1d_control_file/1_create_control_file.ipynb
similarity index 100%
rename from 5_model_input/mizuRoute/1d_control_file/1_create_control_file.ipynb
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1d_control_file/1_create_control_file.ipynb
diff --git a/5_model_input/mizuRoute/1d_control_file/1_create_control_file.py b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1d_control_file/1_create_control_file.py
similarity index 100%
rename from 5_model_input/mizuRoute/1d_control_file/1_create_control_file.py
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1d_control_file/1_create_control_file.py
diff --git a/5_model_input/mizuRoute/1d_control_file/README.md b/dependencies/cwarhm-summa/5_model_input/mizuRoute/1d_control_file/README.md
similarity index 100%
rename from 5_model_input/mizuRoute/1d_control_file/README.md
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/1d_control_file/README.md
diff --git a/5_model_input/mizuRoute/README.md b/dependencies/cwarhm-summa/5_model_input/mizuRoute/README.md
similarity index 100%
rename from 5_model_input/mizuRoute/README.md
rename to dependencies/cwarhm-summa/5_model_input/mizuRoute/README.md
diff --git a/6_model_runs/1_run_summa.sh b/dependencies/cwarhm-summa/6_model_runs/1_run_summa.sh
similarity index 100%
rename from 6_model_runs/1_run_summa.sh
rename to dependencies/cwarhm-summa/6_model_runs/1_run_summa.sh
diff --git a/6_model_runs/1_run_summa_as_array.sh b/dependencies/cwarhm-summa/6_model_runs/1_run_summa_as_array.sh
similarity index 100%
rename from 6_model_runs/1_run_summa_as_array.sh
rename to dependencies/cwarhm-summa/6_model_runs/1_run_summa_as_array.sh
diff --git a/6_model_runs/2_run_mizuRoute.sh b/dependencies/cwarhm-summa/6_model_runs/2_run_mizuRoute.sh
similarity index 100%
rename from 6_model_runs/2_run_mizuRoute.sh
rename to dependencies/cwarhm-summa/6_model_runs/2_run_mizuRoute.sh
diff --git a/6_model_runs/README.md b/dependencies/cwarhm-summa/6_model_runs/README.md
similarity index 100%
rename from 6_model_runs/README.md
rename to dependencies/cwarhm-summa/6_model_runs/README.md
diff --git a/7_visualization/1_mizuRoute_and_summa_shapefiles.ipynb b/dependencies/cwarhm-summa/7_visualization/1_mizuRoute_and_summa_shapefiles.ipynb
similarity index 100%
rename from 7_visualization/1_mizuRoute_and_summa_shapefiles.ipynb
rename to dependencies/cwarhm-summa/7_visualization/1_mizuRoute_and_summa_shapefiles.ipynb
diff --git a/7_visualization/2_ERA5_download_coordinates_and_catchment_shapefile.ipynb b/dependencies/cwarhm-summa/7_visualization/2_ERA5_download_coordinates_and_catchment_shapefile.ipynb
similarity index 100%
rename from 7_visualization/2_ERA5_download_coordinates_and_catchment_shapefile.ipynb
rename to dependencies/cwarhm-summa/7_visualization/2_ERA5_download_coordinates_and_catchment_shapefile.ipynb
diff --git a/7_visualization/3_forcing_grid_vs_catchment_averaged.ipynb b/dependencies/cwarhm-summa/7_visualization/3_forcing_grid_vs_catchment_averaged.ipynb
similarity index 100%
rename from 7_visualization/3_forcing_grid_vs_catchment_averaged.ipynb
rename to dependencies/cwarhm-summa/7_visualization/3_forcing_grid_vs_catchment_averaged.ipynb
diff --git a/7_visualization/4_temperature_lapse_rates.ipynb b/dependencies/cwarhm-summa/7_visualization/4_temperature_lapse_rates.ipynb
similarity index 100%
rename from 7_visualization/4_temperature_lapse_rates.ipynb
rename to dependencies/cwarhm-summa/7_visualization/4_temperature_lapse_rates.ipynb
diff --git a/7_visualization/5_geospatial_parameters_to_model_elements.ipynb b/dependencies/cwarhm-summa/7_visualization/5_geospatial_parameters_to_model_elements.ipynb
similarity index 100%
rename from 7_visualization/5_geospatial_parameters_to_model_elements.ipynb
rename to dependencies/cwarhm-summa/7_visualization/5_geospatial_parameters_to_model_elements.ipynb
diff --git a/7_visualization/6_SWE_SM_ET_Q_per_GRU.ipynb b/dependencies/cwarhm-summa/7_visualization/6_SWE_SM_ET_Q_per_GRU.ipynb
similarity index 100%
rename from 7_visualization/6_SWE_SM_ET_Q_per_GRU.ipynb
rename to dependencies/cwarhm-summa/7_visualization/6_SWE_SM_ET_Q_per_GRU.ipynb
diff --git a/7_visualization/7_SWE_and_streamflow_per_HRU.ipynb b/dependencies/cwarhm-summa/7_visualization/7_SWE_and_streamflow_per_HRU.ipynb
similarity index 100%
rename from 7_visualization/7_SWE_and_streamflow_per_HRU.ipynb
rename to dependencies/cwarhm-summa/7_visualization/7_SWE_and_streamflow_per_HRU.ipynb
diff --git a/7_visualization/README.md b/dependencies/cwarhm-summa/7_visualization/README.md
similarity index 100%
rename from 7_visualization/README.md
rename to dependencies/cwarhm-summa/7_visualization/README.md
diff --git a/dependencies/cwarhm-summa/LICENSE.md b/dependencies/cwarhm-summa/LICENSE.md
new file mode 100644
index 0000000..f288702
--- /dev/null
+++ b/dependencies/cwarhm-summa/LICENSE.md
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/README.md b/dependencies/cwarhm-summa/README.md
similarity index 100%
rename from README.md
rename to dependencies/cwarhm-summa/README.md
diff --git a/dependencies/cwarhm-summa/environment.yml b/dependencies/cwarhm-summa/environment.yml
new file mode 100644
index 0000000..4a5186c
--- /dev/null
+++ b/dependencies/cwarhm-summa/environment.yml
@@ -0,0 +1,208 @@
+name: summa-env
+channels:
+ - conda-forge
+ - defaults
+dependencies:
+ - affine=2.3.0=py_0
+ - attrs=20.3.0=pyhd3deb0d_0
+ - backcall=0.2.0=pyh9f0ad1d_0
+ - backports=1.0=py_2
+ - backports.functools_lru_cache=1.6.4=pyhd8ed1ab_0
+ - blinker=1.4=py_1
+ - boost-cpp=1.74.0=h54f0996_2
+ - brotlipy=0.7.0=py38h294d835_1001
+ - bzip2=1.0.8=h8ffe710_4
+ - ca-certificates=2020.12.5=h5b45459_0
+ - cairo=1.16.0=hb19e0ff_1008
+ - cdsapi=0.5.1=pyhd8ed1ab_0
+ - certifi=2020.12.5=py38haa244fe_1
+ - cffi=1.14.5=py38hd8c33c5_0
+ - cfitsio=3.470=h0af3d06_7
+ - cftime=1.4.1=py38h347fdf6_0
+ - chardet=4.0.0=py38haa244fe_1
+ - click=7.1.2=pyh9f0ad1d_0
+ - click-plugins=1.1.1=py_0
+ - cligj=0.7.1=pyhd8ed1ab_0
+ - colorama=0.4.4=pyh9f0ad1d_0
+ - coverage=5.5=py38h294d835_0
+ - cryptography=3.4.7=py38hd7da0ea_0
+ - curl=7.76.1=hf1763fc_1
+ - cycler=0.10.0=py_2
+ - exiv2=0.27.1=hfa2c1cf_0
+ - expat=2.3.0=h39d44d4_0
+ - fiona=1.8.18=py38h91b197a_1
+ - fontconfig=2.13.1=h1989441_1005
+ - freetype=2.10.4=h546665d_1
+ - freexl=1.0.6=ha8e266a_0
+ - future=0.18.2=py38haa244fe_3
+ - gdal=3.2.1=py38hb9e1232_7
+ - geopandas=0.9.0=pyhd8ed1ab_0
+ - geos=3.9.1=h39d44d4_2
+ - geotiff=1.6.0=hee96dd5_4
+ - gettext=0.19.8.1=h1a89ca6_1005
+ - gsl=2.6=hdfb1a43_2
+ - hdf4=4.2.13=h0e5069d_1005
+ - hdf5=1.10.6=nompi_h5268f04_1114
+ - hs_restclient=1.3.7=pyh9f0ad1d_0
+ - httplib2=0.19.1=pyhd8ed1ab_0
+ - icu=68.1=h0e60522_0
+ - idna=2.10=pyh9f0ad1d_0
+ - intel-openmp=2021.2.0=h57928b3_616
+ - ipykernel=5.5.3=py38h43734a8_0
+ - ipynbname=2021.3.2=pyhd8ed1ab_0
+ - ipython=7.10.1=py38h5ca1d4c_0
+ - ipython_genutils=0.2.0=py_1
+ - jedi=0.17.2=py38haa244fe_1
+ - jinja2=2.11.3=pyh44b312d_0
+ - jpeg=9d=h8ffe710_0
+ - jsoncpp=1.9.4=h2d74725_2
+ - jupyter_client=6.1.12=pyhd8ed1ab_0
+ - jupyter_core=4.7.1=py38haa244fe_0
+ - kealib=1.4.14=h96bfa42_2
+ - kiwisolver=1.3.1=py38hbd9d945_1
+ - krb5=1.17.2=hbae68bd_0
+ - laszip=3.4.3=h6538335_1
+ - laz-perf=1.4.4=ha925a31_0
+ - lcms2=2.12=h2a16943_0
+ - libblas=3.9.0=9_mkl
+ - libcblas=3.9.0=9_mkl
+ - libclang=11.1.0=default_h5c34c98_0
+ - libcurl=7.76.1=hf1763fc_1
+ - libffi=3.3=h0e60522_2
+ - libgdal=3.2.1=hca2c63a_7
+ - libglib=2.68.1=h1e62bf3_0
+ - libiconv=1.16=he774522_0
+ - libkml=1.3.0=h9859afa_1013
+ - liblapack=3.9.0=9_mkl
+ - libnetcdf=4.7.4=nompi_h3a9aa94_107
+ - libpng=1.6.37=h1d00b33_2
+ - libpq=13.2=h4f54205_2
+ - libprotobuf=3.15.8=h7755175_0
+ - librttopo=1.1.0=hb340de5_6
+ - libsodium=1.0.18=h8d14728_1
+ - libspatialindex=1.9.3=h39d44d4_3
+ - libspatialite=5.0.1=h987baeb_3
+ - libssh2=1.9.0=h680486a_6
+ - libtiff=4.2.0=hc10be44_1
+ - libwebp=1.2.0=h57928b3_0
+ - libwebp-base=1.2.0=h8ffe710_2
+ - libxml2=2.9.10=hf5bbc77_4
+ - libxslt=1.1.33=h65864e5_2
+ - libzip=1.7.3=hfed4ece_0
+ - lz4-c=1.9.3=h8ffe710_0
+ - m2w64-gcc-libgfortran=5.3.0=6
+ - m2w64-gcc-libs=5.3.0=7
+ - m2w64-gcc-libs-core=5.3.0=7
+ - m2w64-gmp=6.1.0=2
+ - m2w64-libwinpthread-git=5.0.0.4634.697f757=2
+ - markupsafe=1.1.1=py38h294d835_3
+ - matplotlib=3.4.1=py38haa244fe_0
+ - matplotlib-base=3.4.1=py38heae8d8c_0
+ - mkl=2021.2.0=hb70f87d_389
+ - mock=4.0.3=py38haa244fe_1
+ - msys2-conda-epoch=20160418=1
+ - munch=2.5.0=py_0
+ - nb_conda_kernels=2.3.1=py38haa244fe_0
+ - netcdf4=1.5.6=nompi_py38h8504c39_102
+ - nitro=2.7.dev6=h33f27b4_3
+ - nose2=0.9.2=py_0
+ - numpy=1.20.2=py38h09042cb_0
+ - oauthlib=3.0.1=py_0
+ - olefile=0.46=pyh9f0ad1d_1
+ - openjpeg=2.4.0=h48faf41_0
+ - openssl=1.1.1k=h8ffe710_0
+ - owslib=0.23.0=pyhd8ed1ab_0
+ - pandas=1.2.4=py38h60cbd38_0
+ - parso=0.7.1=pyh9f0ad1d_0
+ - pcre=8.44=ha925a31_0
+ - pdal=2.2.0=h56c84bb_7
+ - pickleshare=0.7.5=py_1003
+ - pillow=8.1.2=py38h9273828_1
+ - pip=21.1.1=pyhd8ed1ab_0
+ - pixman=0.40.0=h8ffe710_0
+ - plotly=4.14.3=pyh44b312d_0
+ - poppler=0.89.0=h3772339_5
+ - poppler-data=0.4.10=0
+ - postgresql=13.2=h0f1a9bc_2
+ - proj=7.2.0=h1cfcee9_2
+ - prompt-toolkit=3.0.18=pyha770c72_0
+ - prompt_toolkit=3.0.18=hd8ed1ab_0
+ - psycopg2=2.8.6=py38hd8c33c5_2
+ - pycparser=2.20=pyh9f0ad1d_2
+ - pygments=2.8.1=pyhd8ed1ab_0
+ - pyjwt=2.1.0=pyhd8ed1ab_0
+ - pyopenssl=20.0.1=pyhd8ed1ab_0
+ - pyparsing=2.4.7=pyh9f0ad1d_0
+ - pyproj=3.0.1=py38h0ae89ba_0
+ - pyqt=5.12.3=py38haa244fe_7
+ - pyqt-impl=5.12.3=py38h885f38d_7
+ - pyqt5-sip=4.19.18=py38h885f38d_7
+ - pyqtchart=5.12=py38h885f38d_7
+ - pyqtwebengine=5.12.1=py38h885f38d_7
+ - pyqtwebkit=5.212=py38h5e969cb_1
+ - pyshp=2.1.3=pyh44b312d_0
+ - pysocks=1.7.1=py38haa244fe_3
+ - python=3.8.8=h7840368_0_cpython
+ - python-dateutil=2.8.1=py_0
+ - python_abi=3.8=1_cp38
+ - pytz=2021.1=pyhd8ed1ab_0
+ - pywin32=300=py38h294d835_0
+ - pyyaml=5.4.1=py38h294d835_0
+ - pyzmq=22.0.3=py38h09162b1_1
+ - qca=2.2.1=hd7ce7fb_3
+ - qgis=3.18.1=py38hb614401_0
+ - qjson=0.9.0=hd7ce7fb_1006
+ - qscintilla2=2.11.2=py38h5e969cb_4
+ - qt=5.12.9=h5909a2a_4
+ - qtkeychain=0.12.0=h552f0f6_0
+ - qtlocation=5.12.9=ha925a31_0
+ - qtwebkit=5.212=hb258684_1
+ - qwt=6.1.6=h552f0f6_0
+ - qwtpolar=1.1.1=hd7ce7fb_7
+ - rasterio=1.2.3=py38h4f558c9_0
+ - requests=2.25.1=pyhd3deb0d_0
+ - requests-oauthlib=1.3.0=pyh9f0ad1d_0
+ - requests-toolbelt=0.9.1=py_0
+ - retrying=1.3.3=py_2
+ - rtree=0.9.7=py38h8b54edf_1
+ - scipy=1.6.3=py38he847743_0
+ - setuptools=49.6.0=py38haa244fe_3
+ - shapely=1.7.1=py38h2426642_4
+ - six=1.15.0=pyh9f0ad1d_0
+ - snuggs=1.4.7=py_0
+ - sqlite=3.35.5=h8ffe710_0
+ - tbb=2021.2.0=h2d74725_0
+ - tiledb=2.2.8=hf84e3da_0
+ - tk=8.6.10=h8ffe710_1
+ - tornado=6.1=py38h294d835_1
+ - tqdm=4.60.0=pyhd8ed1ab_0
+ - traitlets=5.0.5=py_0
+ - urllib3=1.26.5=pyhd8ed1ab_0
+ - vc=14.2=hb210afc_4
+ - vs2015_runtime=14.28.29325=h5e1d092_4
+ - wcwidth=0.2.5=pyh9f0ad1d_2
+ - wheel=0.36.2=pyhd3deb0d_0
+ - win_inet_pton=1.1.0=py38haa244fe_2
+ - wincertstore=0.2=py38haa244fe_1006
+ - xarray=0.17.0=pyhd8ed1ab_0
+ - xerces-c=3.2.3=h0e60522_2
+ - xz=5.2.5=h62dcd97_1
+ - yaml=0.2.5=he774522_0
+ - zeromq=4.3.4=h0e60522_0
+ - zlib=1.2.11=h62dcd97_1010
+ - zstd=1.4.9=h6255e5f_0
+ - pip:
+ - datetime==4.3
+ - decorator==4.4.2
+ - easymore==0.0.1
+ - geojson==2.5.0
+ - geovoronoi==0.3.0
+ - imageio==2.9.0
+ - json5==0.9.5
+ - networkx==2.5.1
+ - pysheds==0.2.7
+ - pywavelets==1.1.1
+ - scikit-image==0.18.1
+ - simpledbf==0.2.6
+ - tifffile==2021.4.8
+ - zope-interface==5.4.0
diff --git a/requirements.txt b/dependencies/cwarhm-summa/requirements.txt
similarity index 100%
rename from requirements.txt
rename to dependencies/cwarhm-summa/requirements.txt
diff --git a/rtd/Makefile b/dependencies/cwarhm-summa/rtd/Makefile
similarity index 100%
rename from rtd/Makefile
rename to dependencies/cwarhm-summa/rtd/Makefile
diff --git a/rtd/make.bat b/dependencies/cwarhm-summa/rtd/make.bat
similarity index 100%
rename from rtd/make.bat
rename to dependencies/cwarhm-summa/rtd/make.bat
diff --git a/rtd/requirements.txt b/dependencies/cwarhm-summa/rtd/requirements.txt
similarity index 100%
rename from rtd/requirements.txt
rename to dependencies/cwarhm-summa/rtd/requirements.txt
diff --git a/rtd/source/0.rst b/dependencies/cwarhm-summa/rtd/source/0.rst
similarity index 100%
rename from rtd/source/0.rst
rename to dependencies/cwarhm-summa/rtd/source/0.rst
diff --git a/rtd/source/0_control_filesREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/0_control_filesREADME.md.rst
similarity index 100%
rename from rtd/source/0_control_filesREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/0_control_filesREADME.md.rst
diff --git a/rtd/source/0_exampleREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/0_exampleREADME.md.rst
similarity index 100%
rename from rtd/source/0_exampleREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/0_exampleREADME.md.rst
diff --git a/rtd/source/0_toolsREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/0_toolsREADME.md.rst
similarity index 100%
rename from rtd/source/0_toolsREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/0_toolsREADME.md.rst
diff --git a/rtd/source/1.rst b/dependencies/cwarhm-summa/rtd/source/1.rst
similarity index 100%
rename from rtd/source/1.rst
rename to dependencies/cwarhm-summa/rtd/source/1.rst
diff --git a/rtd/source/1_folder_prepREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/1_folder_prepREADME.md.rst
similarity index 100%
rename from rtd/source/1_folder_prepREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/1_folder_prepREADME.md.rst
diff --git a/rtd/source/2.rst b/dependencies/cwarhm-summa/rtd/source/2.rst
similarity index 100%
rename from rtd/source/2.rst
rename to dependencies/cwarhm-summa/rtd/source/2.rst
diff --git a/rtd/source/2_installREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/2_installREADME.md.rst
similarity index 100%
rename from rtd/source/2_installREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/2_installREADME.md.rst
diff --git a/rtd/source/3.rst b/dependencies/cwarhm-summa/rtd/source/3.rst
similarity index 100%
rename from rtd/source/3.rst
rename to dependencies/cwarhm-summa/rtd/source/3.rst
diff --git a/rtd/source/3a_forcing1a_download_forcingREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3a_forcing1a_download_forcingREADME.md.rst
similarity index 100%
rename from rtd/source/3a_forcing1a_download_forcingREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3a_forcing1a_download_forcingREADME.md.rst
diff --git a/rtd/source/3a_forcing1b_download_geopotentialREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3a_forcing1b_download_geopotentialREADME.md.rst
similarity index 100%
rename from rtd/source/3a_forcing1b_download_geopotentialREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3a_forcing1b_download_geopotentialREADME.md.rst
diff --git a/rtd/source/3a_forcing2_merge_forcingREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3a_forcing2_merge_forcingREADME.md.rst
similarity index 100%
rename from rtd/source/3a_forcing2_merge_forcingREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3a_forcing2_merge_forcingREADME.md.rst
diff --git a/rtd/source/3a_forcing3_create_shapefileREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3a_forcing3_create_shapefileREADME.md.rst
similarity index 100%
rename from rtd/source/3a_forcing3_create_shapefileREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3a_forcing3_create_shapefileREADME.md.rst
diff --git a/rtd/source/3a_forcingREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3a_forcingREADME.md.rst
similarity index 100%
rename from rtd/source/3a_forcingREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3a_forcingREADME.md.rst
diff --git a/rtd/source/3b.rst b/dependencies/cwarhm-summa/rtd/source/3b.rst
similarity index 100%
rename from rtd/source/3b.rst
rename to dependencies/cwarhm-summa/rtd/source/3b.rst
diff --git a/rtd/source/3b_parametersMERIT_Hydro_DEM1_downloadREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMERIT_Hydro_DEM1_downloadREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMERIT_Hydro_DEM1_downloadREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMERIT_Hydro_DEM1_downloadREADME.md.rst
diff --git a/rtd/source/3b_parametersMERIT_Hydro_DEM3_create_vrtREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMERIT_Hydro_DEM3_create_vrtREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMERIT_Hydro_DEM3_create_vrtREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMERIT_Hydro_DEM3_create_vrtREADME.md.rst
diff --git a/rtd/source/3b_parametersMERIT_Hydro_DEM4_specify_subdomainREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMERIT_Hydro_DEM4_specify_subdomainREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMERIT_Hydro_DEM4_specify_subdomainREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMERIT_Hydro_DEM4_specify_subdomainREADME.md.rst
diff --git a/rtd/source/3b_parametersMERIT_Hydro_DEM5_convert_to_tifREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMERIT_Hydro_DEM5_convert_to_tifREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMERIT_Hydro_DEM5_convert_to_tifREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMERIT_Hydro_DEM5_convert_to_tifREADME.md.rst
diff --git a/rtd/source/3b_parametersMERIT_Hydro_DEMREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMERIT_Hydro_DEMREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMERIT_Hydro_DEMREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMERIT_Hydro_DEMREADME.md.rst
diff --git a/rtd/source/3b_parametersMODIS_MCD12Q1_V61_downloadREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V61_downloadREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMODIS_MCD12Q1_V61_downloadREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V61_downloadREADME.md.rst
diff --git a/rtd/source/3b_parametersMODIS_MCD12Q1_V62_create_vrtREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V62_create_vrtREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMODIS_MCD12Q1_V62_create_vrtREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V62_create_vrtREADME.md.rst
diff --git a/rtd/source/3b_parametersMODIS_MCD12Q1_V63_reproject_vrtREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V63_reproject_vrtREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMODIS_MCD12Q1_V63_reproject_vrtREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V63_reproject_vrtREADME.md.rst
diff --git a/rtd/source/3b_parametersMODIS_MCD12Q1_V64_specify_subdomainREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V64_specify_subdomainREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMODIS_MCD12Q1_V64_specify_subdomainREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V64_specify_subdomainREADME.md.rst
diff --git a/rtd/source/3b_parametersMODIS_MCD12Q1_V65_multiband_vrtREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V65_multiband_vrtREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMODIS_MCD12Q1_V65_multiband_vrtREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V65_multiband_vrtREADME.md.rst
diff --git a/rtd/source/3b_parametersMODIS_MCD12Q1_V66_convert_to_tifREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V66_convert_to_tifREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMODIS_MCD12Q1_V66_convert_to_tifREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V66_convert_to_tifREADME.md.rst
diff --git a/rtd/source/3b_parametersMODIS_MCD12Q1_V67_find_mode_land_classREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V67_find_mode_land_classREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMODIS_MCD12Q1_V67_find_mode_land_classREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V67_find_mode_land_classREADME.md.rst
diff --git a/rtd/source/3b_parametersMODIS_MCD12Q1_V6README.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V6README.md.rst
similarity index 100%
rename from rtd/source/3b_parametersMODIS_MCD12Q1_V6README.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersMODIS_MCD12Q1_V6README.md.rst
diff --git a/rtd/source/3b_parametersREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersREADME.md.rst
diff --git a/rtd/source/3b_parametersSOILGRIDS1_downloadREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersSOILGRIDS1_downloadREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersSOILGRIDS1_downloadREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersSOILGRIDS1_downloadREADME.md.rst
diff --git a/rtd/source/3b_parametersSOILGRIDS2_extract_domainREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersSOILGRIDS2_extract_domainREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersSOILGRIDS2_extract_domainREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersSOILGRIDS2_extract_domainREADME.md.rst
diff --git a/rtd/source/3b_parametersSOILGRIDSREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/3b_parametersSOILGRIDSREADME.md.rst
similarity index 100%
rename from rtd/source/3b_parametersSOILGRIDSREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/3b_parametersSOILGRIDSREADME.md.rst
diff --git a/rtd/source/4.rst b/dependencies/cwarhm-summa/rtd/source/4.rst
similarity index 100%
rename from rtd/source/4.rst
rename to dependencies/cwarhm-summa/rtd/source/4.rst
diff --git a/rtd/source/4a_sort_shapeREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/4a_sort_shapeREADME.md.rst
similarity index 100%
rename from rtd/source/4a_sort_shapeREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/4a_sort_shapeREADME.md.rst
diff --git a/rtd/source/4b.rst b/dependencies/cwarhm-summa/rtd/source/4b.rst
similarity index 100%
rename from rtd/source/4b.rst
rename to dependencies/cwarhm-summa/rtd/source/4b.rst
diff --git a/rtd/source/4b_remapping1_topoREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/4b_remapping1_topoREADME.md.rst
similarity index 100%
rename from rtd/source/4b_remapping1_topoREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/4b_remapping1_topoREADME.md.rst
diff --git a/rtd/source/4b_remapping2_forcingREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/4b_remapping2_forcingREADME.md.rst
similarity index 100%
rename from rtd/source/4b_remapping2_forcingREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/4b_remapping2_forcingREADME.md.rst
diff --git a/rtd/source/4b_remappingREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/4b_remappingREADME.md.rst
similarity index 100%
rename from rtd/source/4b_remappingREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/4b_remappingREADME.md.rst
diff --git a/rtd/source/5.rst b/dependencies/cwarhm-summa/rtd/source/5.rst
similarity index 100%
rename from rtd/source/5.rst
rename to dependencies/cwarhm-summa/rtd/source/5.rst
diff --git a/rtd/source/5_model_inputREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputREADME.md.rst
diff --git a/rtd/source/5_model_inputSUMMA0_base_settingsREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA0_base_settingsREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputSUMMA0_base_settingsREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA0_base_settingsREADME.md.rst
diff --git a/rtd/source/5_model_inputSUMMA1a_copy_base_settingsREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1a_copy_base_settingsREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputSUMMA1a_copy_base_settingsREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1a_copy_base_settingsREADME.md.rst
diff --git a/rtd/source/5_model_inputSUMMA1b_file_managerREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1b_file_managerREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputSUMMA1b_file_managerREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1b_file_managerREADME.md.rst
diff --git a/rtd/source/5_model_inputSUMMA1c_forcing_file_listREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1c_forcing_file_listREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputSUMMA1c_forcing_file_listREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1c_forcing_file_listREADME.md.rst
diff --git a/rtd/source/5_model_inputSUMMA1d_initial_conditionsREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1d_initial_conditionsREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputSUMMA1d_initial_conditionsREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1d_initial_conditionsREADME.md.rst
diff --git a/rtd/source/5_model_inputSUMMA1e_trial_parametersREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1e_trial_parametersREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputSUMMA1e_trial_parametersREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1e_trial_parametersREADME.md.rst
diff --git a/rtd/source/5_model_inputSUMMA1f_attributesREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1f_attributesREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputSUMMA1f_attributesREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMA1f_attributesREADME.md.rst
diff --git a/rtd/source/5_model_inputSUMMAREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMAREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputSUMMAREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputSUMMAREADME.md.rst
diff --git a/rtd/source/5_model_inputmizuRoute0_base_settingsREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRoute0_base_settingsREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputmizuRoute0_base_settingsREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRoute0_base_settingsREADME.md.rst
diff --git a/rtd/source/5_model_inputmizuRoute1a_copy_base_settingsREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRoute1a_copy_base_settingsREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputmizuRoute1a_copy_base_settingsREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRoute1a_copy_base_settingsREADME.md.rst
diff --git a/rtd/source/5_model_inputmizuRoute1b_network_topology_fileREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRoute1b_network_topology_fileREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputmizuRoute1b_network_topology_fileREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRoute1b_network_topology_fileREADME.md.rst
diff --git a/rtd/source/5_model_inputmizuRoute1c_optional_remapping_fileREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRoute1c_optional_remapping_fileREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputmizuRoute1c_optional_remapping_fileREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRoute1c_optional_remapping_fileREADME.md.rst
diff --git a/rtd/source/5_model_inputmizuRoute1d_control_fileREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRoute1d_control_fileREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputmizuRoute1d_control_fileREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRoute1d_control_fileREADME.md.rst
diff --git a/rtd/source/5_model_inputmizuRouteREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRouteREADME.md.rst
similarity index 100%
rename from rtd/source/5_model_inputmizuRouteREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/5_model_inputmizuRouteREADME.md.rst
diff --git a/rtd/source/6.rst b/dependencies/cwarhm-summa/rtd/source/6.rst
similarity index 100%
rename from rtd/source/6.rst
rename to dependencies/cwarhm-summa/rtd/source/6.rst
diff --git a/rtd/source/6_model_runsREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/6_model_runsREADME.md.rst
similarity index 100%
rename from rtd/source/6_model_runsREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/6_model_runsREADME.md.rst
diff --git a/rtd/source/7.rst b/dependencies/cwarhm-summa/rtd/source/7.rst
similarity index 100%
rename from rtd/source/7.rst
rename to dependencies/cwarhm-summa/rtd/source/7.rst
diff --git a/rtd/source/7_visualizationREADME.md.rst b/dependencies/cwarhm-summa/rtd/source/7_visualizationREADME.md.rst
similarity index 100%
rename from rtd/source/7_visualizationREADME.md.rst
rename to dependencies/cwarhm-summa/rtd/source/7_visualizationREADME.md.rst
diff --git a/rtd/source/conf.py b/dependencies/cwarhm-summa/rtd/source/conf.py
similarity index 100%
rename from rtd/source/conf.py
rename to dependencies/cwarhm-summa/rtd/source/conf.py
diff --git a/rtd/source/create_symlinks.sh b/dependencies/cwarhm-summa/rtd/source/create_symlinks.sh
similarity index 100%
rename from rtd/source/create_symlinks.sh
rename to dependencies/cwarhm-summa/rtd/source/create_symlinks.sh
diff --git a/rtd/source/index.rst b/dependencies/cwarhm-summa/rtd/source/index.rst
similarity index 100%
rename from rtd/source/index.rst
rename to dependencies/cwarhm-summa/rtd/source/index.rst
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..d0c3cbf
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = source
+BUILDDIR = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 0000000..6fcf05b
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=source
+set BUILDDIR=build
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.https://www.sphinx-doc.org/
+ exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/docs/source/_generated/cwarhm.data_specific_processing.merit.rst b/docs/source/_generated/cwarhm.data_specific_processing.merit.rst
new file mode 100644
index 0000000..fbc89be
--- /dev/null
+++ b/docs/source/_generated/cwarhm.data_specific_processing.merit.rst
@@ -0,0 +1,32 @@
+cwarhm.data\_specific\_processing.merit
+=======================================
+
+.. automodule:: cwarhm.data_specific_processing.merit
+
+
+
+
+
+
+
+ .. rubric:: Functions
+
+ .. autosummary::
+
+ all_merit_variables
+ build_merit_vrt
+ download_merit
+ extract_merit_tars
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/source/_generated/cwarhm.data_specific_processing.rst b/docs/source/_generated/cwarhm.data_specific_processing.rst
new file mode 100644
index 0000000..e7d26d7
--- /dev/null
+++ b/docs/source/_generated/cwarhm.data_specific_processing.rst
@@ -0,0 +1,31 @@
+cwarhm.data\_specific\_processing
+=================================
+
+.. automodule:: cwarhm.data_specific_processing
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. rubric:: Modules
+
+.. autosummary::
+ :toctree:
+ :recursive:
+
+ cwarhm.data_specific_processing.merit
+
diff --git a/docs/source/_generated/cwarhm.model_agnostic_processing.model_domain.rst b/docs/source/_generated/cwarhm.model_agnostic_processing.model_domain.rst
new file mode 100644
index 0000000..a9bb74a
--- /dev/null
+++ b/docs/source/_generated/cwarhm.model_agnostic_processing.model_domain.rst
@@ -0,0 +1,23 @@
+cwarhm.model\_agnostic\_processing.model\_domain
+================================================
+
+.. automodule:: cwarhm.model_agnostic_processing.model_domain
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/source/_generated/cwarhm.model_agnostic_processing.rst b/docs/source/_generated/cwarhm.model_agnostic_processing.rst
new file mode 100644
index 0000000..d225588
--- /dev/null
+++ b/docs/source/_generated/cwarhm.model_agnostic_processing.rst
@@ -0,0 +1,31 @@
+cwarhm.model\_agnostic\_processing
+==================================
+
+.. automodule:: cwarhm.model_agnostic_processing
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. rubric:: Modules
+
+.. autosummary::
+ :toctree:
+ :recursive:
+
+ cwarhm.model_agnostic_processing.model_domain
+
diff --git a/docs/source/_generated/cwarhm.model_specific_processing.rst b/docs/source/_generated/cwarhm.model_specific_processing.rst
new file mode 100644
index 0000000..cebff13
--- /dev/null
+++ b/docs/source/_generated/cwarhm.model_specific_processing.rst
@@ -0,0 +1,23 @@
+cwarhm.model\_specific\_processing
+==================================
+
+.. automodule:: cwarhm.model_specific_processing
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/source/_generated/cwarhm.rst b/docs/source/_generated/cwarhm.rst
new file mode 100644
index 0000000..cefe59a
--- /dev/null
+++ b/docs/source/_generated/cwarhm.rst
@@ -0,0 +1,35 @@
+cwarhm
+======
+
+.. automodule:: cwarhm
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. rubric:: Modules
+
+.. autosummary::
+ :toctree:
+ :recursive:
+
+ cwarhm.data_specific_processing
+ cwarhm.model_agnostic_processing
+ cwarhm.model_specific_processing
+ cwarhm.util
+ cwarhm.wrappers
+
diff --git a/docs/source/_generated/cwarhm.util.rst b/docs/source/_generated/cwarhm.util.rst
new file mode 100644
index 0000000..243d3be
--- /dev/null
+++ b/docs/source/_generated/cwarhm.util.rst
@@ -0,0 +1,31 @@
+cwarhm.util
+===========
+
+.. automodule:: cwarhm.util
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. rubric:: Modules
+
+.. autosummary::
+ :toctree:
+ :recursive:
+
+ cwarhm.util.util
+
diff --git a/docs/source/_generated/cwarhm.util.util.rst b/docs/source/_generated/cwarhm.util.util.rst
new file mode 100644
index 0000000..0b96d4c
--- /dev/null
+++ b/docs/source/_generated/cwarhm.util.util.rst
@@ -0,0 +1,34 @@
+cwarhm.util.util
+================
+
+.. automodule:: cwarhm.util.util
+
+
+
+
+
+
+
+ .. rubric:: Functions
+
+ .. autosummary::
+
+ get_git_revision_hash
+ get_summa_workflow_control_setting
+ isstrbool
+ log_subprocess_output
+ read_summa_workflow_control_file
+ start_logger
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/source/_generated/cwarhm.wrappers.cwarhm_summa.rst b/docs/source/_generated/cwarhm.wrappers.cwarhm_summa.rst
new file mode 100644
index 0000000..6db2ffe
--- /dev/null
+++ b/docs/source/_generated/cwarhm.wrappers.cwarhm_summa.rst
@@ -0,0 +1,92 @@
+cwarhm.wrappers.cwarhm\_summa
+=============================
+
+.. automodule:: cwarhm.wrappers.cwarhm_summa
+
+
+
+
+
+
+
+ .. rubric:: Functions
+
+ .. autosummary::
+
+ change_control_file_in_submodule
+ clone_mizuroute_repo
+ clone_summa_repo
+ compile_mizuroute
+ compile_summa
+ convert_merit_hydro_vrt_to_tif
+ convert_modis_vrt_to_tif
+ copy_mizuroute_base_settings
+ copy_summa_base_settings
+ create_ERA5_shapefile
+ create_folder_structure
+ create_merit_hydro_virtual_dataset
+ create_mizuroute_control_file
+ create_mizuroute_network_topology_file
+ create_modis_virtual_dataset
+ create_multiband_modis_vrt
+ create_summa_cold_state
+ create_summa_file_manager
+ create_summa_forcing_file_list
+ create_summa_trial_parameters
+ download_ERA5_geopotential
+ download_ERA5_pressureLevel_annual
+ download_ERA5_surfaceLevel_annual
+ download_merit_hydro_adjusted_elevation
+ download_modis_mcd12q1_v6
+ download_soilgrids_soilclass_global
+ exec_python_lwd
+ extract_soilgrids_domain
+ find_HRU_elevation
+ find_HRU_land_classes
+ find_HRU_soil_classes
+ find_mode_modis_landclass
+ initialize_summa_attributes_nc
+ insert_elevation_from_hist_into_summa_attributes
+ insert_landclass_from_hist_into_summa_attributes
+ insert_soilclass_from_hist_into_summa_attributes
+ localworkingdir
+ make_all_weighted_forcing_files
+ make_single_weighted_forcing_file
+ merge_forcing
+ plot_ERA5_download_coordinates_and_catchment_shapefile
+ plot_SWE_SM_ET_Q_per_GRU
+ plot_SWE_and_streamflow_per_HRU
+ plot_forcing_grid_vs_catchment_averaged
+ plot_geospatial_parameters_to_model_elements
+ plot_mizuroute_and_summa_shapefiles
+ plot_temperature_lapse_rates
+ read_mizuroute_base_settings
+ read_summa_base_settings
+ remap_summa_catchments_to_mizurouting
+ reproject_modis_virtual_dataset
+ run_download_ERA5_pressureLevel_paralell
+ run_download_ERA5_surfaceLevel_paralell
+ run_jupyter_notebook
+ run_mizuroute
+ run_summa
+ run_summa_as_array
+ set_default_path
+ sort_catchment_shape
+ specify_merit_hydro_subdomain
+ specify_modis_subdomain
+ subprocess_lwd
+ temperature_lapsing_and_datastep
+ unpack_merit_hydro
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/source/_generated/cwarhm.wrappers.rst b/docs/source/_generated/cwarhm.wrappers.rst
new file mode 100644
index 0000000..060cafb
--- /dev/null
+++ b/docs/source/_generated/cwarhm.wrappers.rst
@@ -0,0 +1,31 @@
+cwarhm.wrappers
+===============
+
+.. automodule:: cwarhm.wrappers
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.. rubric:: Modules
+
+.. autosummary::
+ :toctree:
+ :recursive:
+
+ cwarhm.wrappers.cwarhm_summa
+
diff --git a/docs/source/api.rst b/docs/source/api.rst
new file mode 100644
index 0000000..d626212
--- /dev/null
+++ b/docs/source/api.rst
@@ -0,0 +1,8 @@
+API
+===
+
+.. autosummary::
+ :toctree: _generated
+ :recursive:
+
+ cwarhm
diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100644
index 0000000..2bf8853
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,66 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# This file only contains a selection of the most common options. For a full
+# list see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import pathlib
+import sys
+
+sys.path.insert(0, pathlib.Path(__file__).parents[2].resolve().as_posix())
+
+
+# -- Project information -----------------------------------------------------
+
+project = "CWARHM"
+copyright = "2021, USask Computational Hydrology Group"
+author = "Wouter Knoben, Bart van Osnabrugge"
+
+# The full version, including alpha/beta/rc tags
+release = "0.1 alpha"
+
+
+# -- General configuration ---------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ "sphinx.ext.duration",
+ "sphinx.ext.doctest",
+ "sphinx.ext.autodoc",
+ "sphinx.ext.autosummary",
+]
+autosummary_generate = True
+
+# extensions.append('autoapi.extension')
+# autoapi_type = 'python'
+# autoapi_dirs = ['../../cwarhm']
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path.
+exclude_patterns = []
+
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = "sphinx_rtd_theme"
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
diff --git a/docs/source/index.rst b/docs/source/index.rst
new file mode 100644
index 0000000..3925bff
--- /dev/null
+++ b/docs/source/index.rst
@@ -0,0 +1,34 @@
+.. cwarhm documentation master file, created by
+ sphinx-quickstart on Thu Nov 18 14:24:04 2021.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Welcome to cwarhm's documentation!
+=====================================
+
+**cwarhm** is a Python library to organize workflows to build hydrological models.
+cwarhm stands for Community Workflows to Advance Reproducibility in Hydrological Modelling
+
+Check out the :doc:`usage` section for further information, including how to
+:ref:`install <installation>` the project.
+
+.. note::
+   This project is in its early stages of development.
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Contents:
+
+ usage
+ development
+ wrappers
+ api
+
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/docs/source/usage.rst b/docs/source/usage.rst
new file mode 100644
index 0000000..fb492e9
--- /dev/null
+++ b/docs/source/usage.rst
@@ -0,0 +1,41 @@
+Usage
+=====
+
+.. _installation:
+
+Installation
+------------
+
+To use cwarhm, certain dependencies need to be installed. To install using conda:
+
+On the command line or Anaconda prompt go to the cwarhm directory. Then:
+
+.. code-block:: console
+
+ $ conda env create -f environment.yml
+ $ conda activate cwarhm-env
+
+Last, the package cwarhm itself can be added to the environment:
+
+.. code-block:: console
+
+ $ pip install -e .
+
+First test
+----------
+As a test case, workflows/summa_bowatbanff/ is available. Before running the test script test_bow_at_banff.py, some path modifications have to be made:
+
+1. In control_Bow_at_Banff_test.txt, change the **root_path** modeling domain setting to a local folder where the test results should be saved
+2. In test_bow_at_banff.py, change the paths:
+
+ #. cwarhm_summa_folder
+ #. results_folder_path , to match the root_path in the control file
+ #. test_data_path , path to the test data, if you want to skip the data download workflow steps (by default)
+ #. reset_test to True (default), this is a flag that starts a new run (and deletes all data from any results folder), and restarts by copying the test data to the results folder or False: continue with existing data in the results folder.
+
+3. The test data is not part of this repo due to its size (30+GB). For those in the comphyd group it can be found here:
+
+**copernicus** /project/gwf/gwf_cmt/cwarhm_test_data
+**GRAHAM** /project/6008034/CompHydCore/cwarhm_test_data
+
+If this is not accessible, you can download the data with the CWARHM functions (wrapped from the original CWARHM). You can also use a results directory from an earlier test run.
diff --git a/docs/source/wrappers.rst b/docs/source/wrappers.rst
new file mode 100644
index 0000000..091dc6e
--- /dev/null
+++ b/docs/source/wrappers.rst
@@ -0,0 +1,24 @@
+Wrappers
+========
+
+Much development work is done in different languages or with different code bases.
+To maintain original authorship, the preferred way is to link directly to the original code.
+
+Github
+------
+
+In GitHub, this is done through submodules. A submodule links an external repository into this one at a fixed commit, so the original code and its authorship are preserved.
+
+Python
+------
+
+The preferred way to share code between python developments is through imports.
+
+
+Why wrappers
+------------
+
+Wrappers are functions in the cwarhm package that wrap around submodules.
+An example is the wrapper to set up SUMMA models, from the summaWorkflow_public submodule:
+
+.. autofunction:: cwarhm.wrappers.cwarhm_summa.create_folder_structure
diff --git a/environment.yml b/environment.yml
index 4a5186c..09b69bc 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,208 +1,26 @@
-name: summa-env
+name: cwarhm-env
channels:
- conda-forge
- defaults
dependencies:
- - affine=2.3.0=py_0
- - attrs=20.3.0=pyhd3deb0d_0
- - backcall=0.2.0=pyh9f0ad1d_0
- - backports=1.0=py_2
- - backports.functools_lru_cache=1.6.4=pyhd8ed1ab_0
- - blinker=1.4=py_1
- - boost-cpp=1.74.0=h54f0996_2
- - brotlipy=0.7.0=py38h294d835_1001
- - bzip2=1.0.8=h8ffe710_4
- - ca-certificates=2020.12.5=h5b45459_0
- - cairo=1.16.0=hb19e0ff_1008
- - cdsapi=0.5.1=pyhd8ed1ab_0
- - certifi=2020.12.5=py38haa244fe_1
- - cffi=1.14.5=py38hd8c33c5_0
- - cfitsio=3.470=h0af3d06_7
- - cftime=1.4.1=py38h347fdf6_0
- - chardet=4.0.0=py38haa244fe_1
- - click=7.1.2=pyh9f0ad1d_0
- - click-plugins=1.1.1=py_0
- - cligj=0.7.1=pyhd8ed1ab_0
- - colorama=0.4.4=pyh9f0ad1d_0
- - coverage=5.5=py38h294d835_0
- - cryptography=3.4.7=py38hd7da0ea_0
- - curl=7.76.1=hf1763fc_1
- - cycler=0.10.0=py_2
- - exiv2=0.27.1=hfa2c1cf_0
- - expat=2.3.0=h39d44d4_0
- - fiona=1.8.18=py38h91b197a_1
- - fontconfig=2.13.1=h1989441_1005
- - freetype=2.10.4=h546665d_1
- - freexl=1.0.6=ha8e266a_0
- - future=0.18.2=py38haa244fe_3
- - gdal=3.2.1=py38hb9e1232_7
- - geopandas=0.9.0=pyhd8ed1ab_0
- - geos=3.9.1=h39d44d4_2
- - geotiff=1.6.0=hee96dd5_4
- - gettext=0.19.8.1=h1a89ca6_1005
- - gsl=2.6=hdfb1a43_2
- - hdf4=4.2.13=h0e5069d_1005
- - hdf5=1.10.6=nompi_h5268f04_1114
- - hs_restclient=1.3.7=pyh9f0ad1d_0
- - httplib2=0.19.1=pyhd8ed1ab_0
- - icu=68.1=h0e60522_0
- - idna=2.10=pyh9f0ad1d_0
- - intel-openmp=2021.2.0=h57928b3_616
- - ipykernel=5.5.3=py38h43734a8_0
- - ipynbname=2021.3.2=pyhd8ed1ab_0
- - ipython=7.10.1=py38h5ca1d4c_0
- - ipython_genutils=0.2.0=py_1
- - jedi=0.17.2=py38haa244fe_1
- - jinja2=2.11.3=pyh44b312d_0
- - jpeg=9d=h8ffe710_0
- - jsoncpp=1.9.4=h2d74725_2
- - jupyter_client=6.1.12=pyhd8ed1ab_0
- - jupyter_core=4.7.1=py38haa244fe_0
- - kealib=1.4.14=h96bfa42_2
- - kiwisolver=1.3.1=py38hbd9d945_1
- - krb5=1.17.2=hbae68bd_0
- - laszip=3.4.3=h6538335_1
- - laz-perf=1.4.4=ha925a31_0
- - lcms2=2.12=h2a16943_0
- - libblas=3.9.0=9_mkl
- - libcblas=3.9.0=9_mkl
- - libclang=11.1.0=default_h5c34c98_0
- - libcurl=7.76.1=hf1763fc_1
- - libffi=3.3=h0e60522_2
- - libgdal=3.2.1=hca2c63a_7
- - libglib=2.68.1=h1e62bf3_0
- - libiconv=1.16=he774522_0
- - libkml=1.3.0=h9859afa_1013
- - liblapack=3.9.0=9_mkl
- - libnetcdf=4.7.4=nompi_h3a9aa94_107
- - libpng=1.6.37=h1d00b33_2
- - libpq=13.2=h4f54205_2
- - libprotobuf=3.15.8=h7755175_0
- - librttopo=1.1.0=hb340de5_6
- - libsodium=1.0.18=h8d14728_1
- - libspatialindex=1.9.3=h39d44d4_3
- - libspatialite=5.0.1=h987baeb_3
- - libssh2=1.9.0=h680486a_6
- - libtiff=4.2.0=hc10be44_1
- - libwebp=1.2.0=h57928b3_0
- - libwebp-base=1.2.0=h8ffe710_2
- - libxml2=2.9.10=hf5bbc77_4
- - libxslt=1.1.33=h65864e5_2
- - libzip=1.7.3=hfed4ece_0
- - lz4-c=1.9.3=h8ffe710_0
- - m2w64-gcc-libgfortran=5.3.0=6
- - m2w64-gcc-libs=5.3.0=7
- - m2w64-gcc-libs-core=5.3.0=7
- - m2w64-gmp=6.1.0=2
- - m2w64-libwinpthread-git=5.0.0.4634.697f757=2
- - markupsafe=1.1.1=py38h294d835_3
- - matplotlib=3.4.1=py38haa244fe_0
- - matplotlib-base=3.4.1=py38heae8d8c_0
- - mkl=2021.2.0=hb70f87d_389
- - mock=4.0.3=py38haa244fe_1
- - msys2-conda-epoch=20160418=1
- - munch=2.5.0=py_0
- - nb_conda_kernels=2.3.1=py38haa244fe_0
- - netcdf4=1.5.6=nompi_py38h8504c39_102
- - nitro=2.7.dev6=h33f27b4_3
- - nose2=0.9.2=py_0
- - numpy=1.20.2=py38h09042cb_0
- - oauthlib=3.0.1=py_0
- - olefile=0.46=pyh9f0ad1d_1
- - openjpeg=2.4.0=h48faf41_0
- - openssl=1.1.1k=h8ffe710_0
- - owslib=0.23.0=pyhd8ed1ab_0
- - pandas=1.2.4=py38h60cbd38_0
- - parso=0.7.1=pyh9f0ad1d_0
- - pcre=8.44=ha925a31_0
- - pdal=2.2.0=h56c84bb_7
- - pickleshare=0.7.5=py_1003
- - pillow=8.1.2=py38h9273828_1
- - pip=21.1.1=pyhd8ed1ab_0
- - pixman=0.40.0=h8ffe710_0
- - plotly=4.14.3=pyh44b312d_0
- - poppler=0.89.0=h3772339_5
- - poppler-data=0.4.10=0
- - postgresql=13.2=h0f1a9bc_2
- - proj=7.2.0=h1cfcee9_2
- - prompt-toolkit=3.0.18=pyha770c72_0
- - prompt_toolkit=3.0.18=hd8ed1ab_0
- - psycopg2=2.8.6=py38hd8c33c5_2
- - pycparser=2.20=pyh9f0ad1d_2
- - pygments=2.8.1=pyhd8ed1ab_0
- - pyjwt=2.1.0=pyhd8ed1ab_0
- - pyopenssl=20.0.1=pyhd8ed1ab_0
- - pyparsing=2.4.7=pyh9f0ad1d_0
- - pyproj=3.0.1=py38h0ae89ba_0
- - pyqt=5.12.3=py38haa244fe_7
- - pyqt-impl=5.12.3=py38h885f38d_7
- - pyqt5-sip=4.19.18=py38h885f38d_7
- - pyqtchart=5.12=py38h885f38d_7
- - pyqtwebengine=5.12.1=py38h885f38d_7
- - pyqtwebkit=5.212=py38h5e969cb_1
- - pyshp=2.1.3=pyh44b312d_0
- - pysocks=1.7.1=py38haa244fe_3
- - python=3.8.8=h7840368_0_cpython
- - python-dateutil=2.8.1=py_0
- - python_abi=3.8=1_cp38
- - pytz=2021.1=pyhd8ed1ab_0
- - pywin32=300=py38h294d835_0
- - pyyaml=5.4.1=py38h294d835_0
- - pyzmq=22.0.3=py38h09162b1_1
- - qca=2.2.1=hd7ce7fb_3
- - qgis=3.18.1=py38hb614401_0
- - qjson=0.9.0=hd7ce7fb_1006
- - qscintilla2=2.11.2=py38h5e969cb_4
- - qt=5.12.9=h5909a2a_4
- - qtkeychain=0.12.0=h552f0f6_0
- - qtlocation=5.12.9=ha925a31_0
- - qtwebkit=5.212=hb258684_1
- - qwt=6.1.6=h552f0f6_0
- - qwtpolar=1.1.1=hd7ce7fb_7
- - rasterio=1.2.3=py38h4f558c9_0
- - requests=2.25.1=pyhd3deb0d_0
- - requests-oauthlib=1.3.0=pyh9f0ad1d_0
- - requests-toolbelt=0.9.1=py_0
- - retrying=1.3.3=py_2
- - rtree=0.9.7=py38h8b54edf_1
- - scipy=1.6.3=py38he847743_0
- - setuptools=49.6.0=py38haa244fe_3
- - shapely=1.7.1=py38h2426642_4
- - six=1.15.0=pyh9f0ad1d_0
- - snuggs=1.4.7=py_0
- - sqlite=3.35.5=h8ffe710_0
- - tbb=2021.2.0=h2d74725_0
- - tiledb=2.2.8=hf84e3da_0
- - tk=8.6.10=h8ffe710_1
- - tornado=6.1=py38h294d835_1
- - tqdm=4.60.0=pyhd8ed1ab_0
- - traitlets=5.0.5=py_0
- - urllib3=1.26.5=pyhd8ed1ab_0
- - vc=14.2=hb210afc_4
- - vs2015_runtime=14.28.29325=h5e1d092_4
- - wcwidth=0.2.5=pyh9f0ad1d_2
- - wheel=0.36.2=pyhd3deb0d_0
- - win_inet_pton=1.1.0=py38haa244fe_2
- - wincertstore=0.2=py38haa244fe_1006
- - xarray=0.17.0=pyhd8ed1ab_0
- - xerces-c=3.2.3=h0e60522_2
- - xz=5.2.5=h62dcd97_1
- - yaml=0.2.5=he774522_0
- - zeromq=4.3.4=h0e60522_0
- - zlib=1.2.11=h62dcd97_1010
- - zstd=1.4.9=h6255e5f_0
+ - python = 3.8.8 # gdal issues with python 3.9
+ - pip
+ - jupyter # for visualization
+ - nbconvert # for running notebooks from terminal
+ - numpy
+ - gdal < 3.4.0 # conflict with fiona 1.8.20
+ - rasterio
+ - xarray
+ - dask
+ - netCDF4
+ - pandas < 1.4.0 # numpy integer / float attribute error
+ - geopandas
+ - sphinx # for documentation
+ - pathlib
+ - qgis
- pip:
- - datetime==4.3
- - decorator==4.4.2
- - easymore==0.0.1
- - geojson==2.5.0
- - geovoronoi==0.3.0
- - imageio==2.9.0
- - json5==0.9.5
- - networkx==2.5.1
- - pysheds==0.2.7
- - pywavelets==1.1.1
- - scikit-image==0.18.1
- - simpledbf==0.2.6
- - tifffile==2021.4.8
- - zope-interface==5.4.0
+ - easymore
+ - rasterstats
+ - sphinx-rtd-theme # for read the docs lay-out
+ - black # for code reformatting
+ - cdsapi # for ERA5 downloading
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..714bb42
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,17 @@
+from setuptools import setup
+from setuptools import find_packages
+
+with open("README.rst", "r") as f:
+ long_description = f.read()
+
+setup(
+ name="CWARHM",
+ version="0.1",
+ description="Community Workflows to Advance Reproducibility in Hydrological Modelling",
+ license="MIT",
+ long_description=long_description,
+ author="CH-EARTH - computational hydrology group at Centre for Hydrology, University of Saskatchewan",
+ url="https://github.com/CH-Earth/CWARHM",
+ packages=find_packages()
+ # packages=['fairymwah'] #same as name
+)
diff --git a/tests/mesh_bowatbanff/MESH_parameters.nc b/tests/mesh_bowatbanff/MESH_parameters.nc
new file mode 100644
index 0000000..7224a57
Binary files /dev/null and b/tests/mesh_bowatbanff/MESH_parameters.nc differ
diff --git a/tests/mesh_bowatbanff/control_Bow_at_Banff_test.txt b/tests/mesh_bowatbanff/control_Bow_at_Banff_test.txt
new file mode 100644
index 0000000..bb3b2b6
--- /dev/null
+++ b/tests/mesh_bowatbanff/control_Bow_at_Banff_test.txt
@@ -0,0 +1,234 @@
+# SUMMA workflow setting file.
+# Characters '|' and '#' are used as separators to find the actual setting values. Any text behind '|' is assumed to be part of the setting value, unless preceded by '#'.
+
+# Note on path specification
+# If deviating from default paths, a full path must be specified. E.g. '/home/user/non-default/path'
+
+
+# Modeling domain settings
+root_path | /Users/ayx374/Documents/project/chwarm_test_results # Root folder where data will be stored.
+domain_name | BowAtBanff_mesh # Used as part of the root folder name for the prepared data.
+
+
+# Shapefile settings - SUMMA catchment file
+catchment_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment'.
+catchment_shp_name | bow_distributed_elevation_zone.shp # Name of the catchment shapefile. Requires extension '.shp'.
+catchment_shp_gruid | GRU_ID # Name of the GRU ID column (can be any numeric value, HRU's within a single GRU have the same GRU ID).
+catchment_shp_hruid | HRU_ID # Name of the HRU ID column (consecutive from 1 to total number of HRUs, must be unique).
+catchment_shp_area | HRU_area # Name of the catchment area column. Area must be in units [m^2]
+catchment_shp_lat | center_lat # Name of the latitude column. Should be a value representative for the HRU. Typically the centroid.
+catchment_shp_lon | center_lon # Name of the longitude column. Should be a value representative for the HRU. Typically the centroid.
+
+
+# Shapefile settings - river network file
+river_network_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/river_network'.
+river_network_shp_name | bow_river_network_from_merit_hydro.shp # Name of the river network shapefile. Requires extension '.shp'.
+river_network_shp_segid | COMID # Name of the segment ID column.
+river_network_shp_downsegid | NextDownID # Name of the downstream segment ID column.
+river_network_shp_slope | slope # Name of the slope column. Slope must be in in units [length/length].
+river_network_shp_length | length # Name of the segment length column. Length must be in units [m].
+
+
+# Shapefile settings - catchment file
+river_basin_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/river_basins'.
+river_basin_shp_name | bow_distributed.shp # Name of the routing subbasins shapefile needed for remapping. Requires extension '.shp'.
+river_basin_shp_rm_hruid | COMID # Name of the routing basin ID column.
+river_basin_shp_area | area # Name of the catchment area column. Area must be in units [m^2]
+river_basin_shp_hru_to_seg | hru_to_seg # Name of the column that shows which river segment each HRU connects to.
+
+
+# Shapefile settings - SUMMA-to-mizuRoute
+river_basin_needs_remap | yes # 'no' if routing basins map 1:1 onto model GRUs. 'yes' if river segments span multiple GRUs or if multiple segments are inside a single GRU.
+
+
+# Install settings
+github_summa | https://github.com/CH-Earth/summa # Replace this with the path to your own fork if you forked the repo.
+github_mizuroute | https://github.com/ncar/mizuroute # Replace this with the path to your own fork if you forked the repo.
+install_path_summa | default # If 'default', clones source code into 'root_path/installs/summa'.
+install_path_mizuroute | default # If 'default', clones source code into 'root_path/installs/mizuRoute'.
+exe_name_summa | summa.exe # Name of the compiled executable.
+exe_name_mizuroute | mizuroute.exe # Name of the compiled executable.
+
+
+# Forcing settings
+forcing_raw_time | 2008,2013 # Years to download: Jan-[from],Dec-[to].
+forcing_raw_space | 51.74/-116.55/50.95/-115.52 # Bounding box of the shapefile: lat_max/lon_min/lat_min/lon_max. Will be converted to ERA5 download coordinates in script. Order and use of '/' to separate values is mandatory.
+forcing_time_step_size | 3600 # Size of the forcing time step in [s]. Must be constant.
+forcing_measurement_height | 3 # Reference height for forcing measurements [m].
+forcing_shape_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/forcing'.
+forcing_shape_name | era5_grid.shp # Name of the forcing shapefile. Requires extension '.shp'.
+forcing_shape_lat_name | lat # Name of the latitude field that contains the latitude of ERA5 data points.
+forcing_shape_lon_name | lon # Name of the longitude field that contains the longitude of ERA5 data points.
+forcing_geo_path | default # If 'default', uses 'root_path/domain_[name]/forcing/0_geopotential'.
+forcing_raw_path | default # If 'default', uses 'root_path/domain_[name]/forcing/1_raw_data'.
+forcing_merged_path | default # If 'default', uses 'root_path/domain_[name]/forcing/2_merged_data'.
+forcing_easymore_path | default # If 'default', uses 'root_path/domain_[name]/forcing/3_temp_easymore'.
+forcing_basin_avg_path | default # If 'default', uses 'root_path/domain_[name]/forcing/3_basin_averaged_data'.
+forcing_summa_path | default # If 'default', uses 'root_path/domain_[name]/forcing/4_SUMMA_input'.
+
+
+# Parameter settings - DEM
+parameter_dem_main_url | http://hydro.iis.u-tokyo.ac.jp/~yamadai/MERIT_Hydro/distribute/v1.0.1/ # Primary download URL for MERIT Hydro adjusted elevation data. Needs to be appended with filenames.
+parameter_dem_file_template | elv_{}{}.tar # Template for download file names.
+parameter_dem_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/1_MERIT_hydro_raw_data'.
+parameter_dem_unpack_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/2_MERIT_hydro_unpacked_data'.
+parameter_dem_vrt1_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/3_vrt'.
+parameter_dem_vrt2_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/4_domain_vrt'.
+parameter_dem_tif_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/5_elevation'.
+parameter_dem_tif_name | elevation.tif # Name of the final DEM for the domain. Must be in .tif format.
+
+
+# Parameter settings - soil
+parameter_soil_hydro_ID | 1361509511e44adfba814f6950c6e742 # ID of the Hydroshare resource to download.
+parameter_soil_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/soilclass/1_soil_classes_global'.
+parameter_soil_domain_path | default # If 'default', uses 'root_path/domain_[name]/parameters/soilclass/2_soil_classes_domain'.
+parameter_soil_tif_name | soil_classes.tif # Name of the final soil class overview for the domain. Must be in .tif format.
+
+
+# Parameter settings - land
+parameter_land_list_path | default # If 'default', uses 'summaWorkflow_public/3b_parameters/MODIS_MCD12Q1_V6/1_download/'. Location of file with data download links.
+parameter_land_list_name | daac_mcd12q1_data_links.txt # Name of file that contains list of MODIS download urls.
+parameter_land_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/1_MODIS_raw_data'.
+parameter_land_vrt1_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/2_vrt_native_crs'. Virtual dataset composed of .hdf files.
+parameter_land_vrt2_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/3_vrt_epsg_4326'. Virtual dataset projected in EPSG:4326.
+parameter_land_vrt3_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/4_domain_vrt_epsg_4326'. Virtual dataset cropped to model domain.
+parameter_land_vrt4_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/5_multiband_domain_vrt_epsg_4326'. Multiband cropped virtual dataset.
+parameter_land_tif_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/6_tif_multiband'.
+parameter_land_mode_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/7_mode_land_class'.
+parameter_land_tif_name | land_classes.tif # Name of the final landclass overview for the domain. Must be in .tif format.
+
+
+# Intersection settings
+intersect_dem_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_dem'.
+intersect_dem_name | catchment_with_merit_dem.shp # Name of the shapefile with intersection between catchment and MERIT Hydro DEM, stored in column 'elev_mean'.
+intersect_soil_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_soilgrids'.
+intersect_soil_name | catchment_with_soilgrids.shp # Name of the shapefile with intersection between catchment and SOILGRIDS-derived USDA soil classes, stored in columns 'USDA_{1,...n}'
+intersect_land_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_modis'.
+intersect_land_name | catchment_with_modis.shp # Name of the shapefile with intersection between catchment and MODIS-derived IGBP land classes, stored in columns 'IGBP_{1,...n}'
+intersect_forcing_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_forcing'.
+intersect_routing_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_routing'.
+intersect_routing_name | catchment_with_routing_basins.shp # Name of the shapefile with intersection between hydrologic model catchments and routing model catchments.
+
+
+# Experiment settings - general
+experiment_id | run1 # Descriptor of the modelling experiment; used as output folder name.
+experiment_time_start | default # Simulation start. If 'default', constructs this from 'forcing_raw_time' setting and uses all downloaded forcing data; e.g. '1979-01-01 00:00'.
+experiment_time_end | default # Simulation end. If 'default', constructs this from 'forcing_raw_time' setting and uses all downloaded forcing data; e.g. '1979-12-31 23:00'.
+experiment_output_summa | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/SUMMA'.
+experiment_output_mizuRoute | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/mizuRoute'.
+experiment_log_summa | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/SUMMA/SUMMA_logs'.
+experiment_log_mizuroute | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/mizuRoute/mizuRoute_logs'.
+experiment_backup_settings | yes # Flag to (not) create a copy of the model settings in the output folder; "no" or "yes". Copying settings may be undesirable if files are large.
+
+
+# Experiment settings - SUMMA
+settings_summa_path | default # If 'default', uses 'root_path/domain_[name]/settings/SUMMA'.
+settings_summa_filemanager | fileManager.txt # Name of the file with the SUMMA inputs.
+settings_summa_coldstate | coldState.nc # Name of the file with initial states.
+settings_summa_trialParams | trialParams.nc # Name of the file that can contain trial parameter values (note, can be empty of any actual parameter values but must be provided and must contain an 'hruId' variable).
+settings_summa_forcing_list | forcingFileList.txt # Name of the file that has the list of forcing files.
+settings_summa_attributes | attributes.nc # Name of the attributes file.
+settings_summa_connect_HRUs | no # Attribute setting: "no" or "yes". Tricky concept, see README in ./5_model_input/SUMMA/3f_attributes. If no; all HRUs modeled as independent columns (downHRUindex = 0). If yes; HRUs within each GRU are connected based on relative HRU elevation (highest = upstream, lowest = outlet).
+settings_summa_trialParam_n | 1 # Number of trial parameter specifications. Specify 0 if none are wanted (they can still be included in this file but won't be read).
+settings_summa_trialParam_1 | maxstep,900 # Name of trial parameter and value to assign. Value assumed to be float.
+
+
+# Experiment settings - mizuRoute
+settings_mizu_path | default # If 'default', uses 'root_path/domain_[name]/settings/mizuRoute'.
+settings_mizu_parameters | param.nml.default # Name of the routing parameters file.
+settings_mizu_topology | topology.nc # Name of the river network topology file.
+settings_mizu_remap | routing_remap.nc # Name of the optional catchment remapping file, for cases when SUMMA uses different catchments than mizuRoute.
+settings_mizu_control_file | mizuroute.control # Name of the control file.
+settings_mizu_routing_var | averageRoutedRunoff # Name of SUMMA output variable to use for routing.
+settings_mizu_routing_units | m/s # Units of the variable to be routed.
+settings_mizu_routing_dt | 3600 # Size of the routing time step [s].
+settings_mizu_output_freq | annual # Frequency with which mizuRoute generates new output files. Must be one of 'single', 'day', 'month', 'annual'.
+settings_mizu_output_vars | 0 # Routing output. '0' for both KWT and IRF; '1' IRF only; '2' KWT only.
+settings_mizu_within_basin | 0 # '0' (no) or '1' (IRF routing). Flag to enable within-basin routing by mizuRoute. Should be set to 0 if SUMMA is run with "subRouting" decision "timeDlay".
+settings_mizu_make_outlet | 71028585 # Segment ID or IDs that should be set as network outlet. Specify multiple IDs separated by commas: X,Y,Z. Specify no IDs as: n/a. Note that this can also be done in the network shapefile.
+
+# Experiment settings - MESH
+settings_mesh_path | default # If 'default', uses 'root_path/domain_[name]/settings/mesh'.
+settings_mesh_topology | MESH_drainage_database.nc # Name of the river network topology file.
+settings_make_outlet | 71028585 # Segment ID or IDs that should be set as network outlet. Specify multiple IDs separated by commas: X,Y,Z. Specify no IDs as: n/a. Note that this can also be done in the network shapefile.
+
+# Postprocessing settings
+visualization_folder | default # If 'default', uses 'root_path/domain_[name]/visualization'.
+
+
+# Default folder structure
+# Example of the resulting folder structure in "root_path".
+# New domains will go into their own folder.
+
+- summaWorkflow_data
+ |
+ |_ domain_BowAtBanff
+ | |
+ | |_ forcing
+ | | |_ 0_geopotential
+ | | |_ 1_raw_data
+ | | |_ 2_merged_data
+ | | |_ 3_basin_averaged_data
+ | | |_ 4_SUMMA_input
+ | |
+ | |_ parameters
+ | | |_ soilclass
+ | | | |_ 1_soil_classes_global
+ | | | |_ 2_soil_classes_domain
+ | | |
+ | | |_ landclass
+ | | | |_ 1_MODIS_raw_data
+ | | | |_ 2_vrt_native_crs
+ | | | |_ 3_vrt_epsg_4326
+ | | | |_ 4_domain_vrt_epsg_4326
+ | | | |_ 5_multiband_domain_vrt_epsg_4326
+ | | | |_ 6_tif_multiband
+ | | | |_ 7_mode_land_class
+ | | |
+ | | |_ dem
+ | | |_ 1_MERIT_hydro_raw_data
+ | | |_ 2_MERIT_hydro_unpacked_data
+ | | |_ 3_vrt
+ | | |_ 4_domain_vrt
+ | | |_ 5_elevation
+ | |
+ | |_ settings
+ | | |_ mizuRoute
+ | | |_ SUMMA
+ | |
+ | |_ shapefiles
+ | | |_ catchment
+ | | |_ catchment_intersection
+ | | | |_ with_dem
+ | | | |_ with_forcing
+ | | | |_ with_soil
+ | | | |_ with_veg
+ | | |_ forcing
+ | | |_ river_basins
+ | | |_ river_network
+ | |
+ | |_ simulations
+ | | |_run1
+ | | | |_ 0_settings_backup
+ | | | | |_ summa
+ | | | | |_ mizuRoute
+ | | | |_ summa
+ | | | | |_run_settings
+ | | | | |_SUMMA_logs
+ | | | |_ mizuRoute
+ | | | | |_run_settings
+ | | | | |_mizuRoute_logs
+ | | |_run2
+ | | |_ ...
+ | |
+ | |_ visualization
+ |
+ |_ domain_global
+ | |_ ...
+ |
+ |_ domain_northAmerica
+ | |_ ...
+ |
+ |_ installs
+ |_ mizuRoute
+ |_ SUMMA
diff --git a/tests/mesh_bowatbanff/domain_BowAtBanff_mesh.zip b/tests/mesh_bowatbanff/domain_BowAtBanff_mesh.zip
new file mode 100644
index 0000000..e707f81
Binary files /dev/null and b/tests/mesh_bowatbanff/domain_BowAtBanff_mesh.zip differ
diff --git a/tests/mesh_bowatbanff/test_mesh_bowatbanff.py b/tests/mesh_bowatbanff/test_mesh_bowatbanff.py
new file mode 100644
index 0000000..1d1a5b0
--- /dev/null
+++ b/tests/mesh_bowatbanff/test_mesh_bowatbanff.py
@@ -0,0 +1,128 @@
+import os
+import shutil
+import sys
+
+
+
+#%%
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
+from pathlib import Path
+import xarray as xr
+import geopandas as gpd
+import zipfile
+import pandas as pd
+import numpy as np
+
+from cwarhm.model_specific_processing import mesh as mesh
+from cwarhm.model_agnostic_processing import HRU as HRU
+import cwarhm.util.util as utl
+
+# set paths
+# results_folder_path: path to save the results (and to extract the test data to)
+# NOTE: results_folder_path needs to be set here AND in control_Bow_at_Banff_test.txt
+# as root_folder
+os.chdir(os.path.dirname(os.path.realpath(__file__)))
+results_folder_path = Path("/Users/ayx374/Documents/project/chwarm_test_results")
+
+# extract test data to test path
+with zipfile.ZipFile('domain_BowAtBanff_mesh.zip') as zip_ref:
+ zip_ref.extractall(results_folder_path)
+# read control file
+control_options = utl.read_summa_workflow_control_file('control_Bow_at_Banff_test.txt')
+
+
+#%%
+# create mesh topology
+drain_db_path = os.path.join(control_options['settings_mesh_path'],control_options['settings_mesh_topology'])
+
+mesh.generate_mesh_topology(control_options['river_network_shp_path'],
+ control_options['river_basin_shp_path'],
+ drain_db_path,
+ control_options['settings_make_outlet'])
+ranks, drain_db = mesh.reindex_topology_file(drain_db_path)
+# save file
+drain_db.to_netcdf(drain_db_path)
+
+# calculate land use fractions for MESH GRUS from CWARHM-SUMMA GRU/HRU land use counts
+gdf_land_use_counts = gpd.read_file(os.path.join(
+ control_options['intersect_land_path'],control_options['intersect_land_name']))
+df_gru_land_use_fractions = HRU.gru_fraction_from_hru_counts(gdf_land_use_counts)
+
+# set names for discretization
+fraction_type = ['Evergreen Needleleaf Forests','Woody Savannas','Savannas',
+'Grasslands', 'Permanent Wetlands', 'Urban and Built-up Lands', 'Permanent Snow and Ice',
+'Barren','Water Bodies']
+# and add fractions and Grouped Response Unit info to the drainage_db
+drain_db = mesh.add_gru_fractions_to_drainage_db(drain_db, df_gru_land_use_fractions, fraction_type)
+# save file
+drain_db.to_netcdf(drain_db_path)
+
+# remap forcing data from grids, to MESH GRUs (CWARHM-SUMMA maps to SUMMA HRUs)
+HRU.map_forcing_data(control_options['river_basin_shp_path'],
+ control_options['forcing_merged_path']+'/*.nc',
+ control_options['forcing_basin_avg_path']+'/',
+ var_names = ['LWRadAtm', 'SWRadAtm', 'pptrate', 'airpres', 'airtemp', 'spechum', 'windspd'],
+ var_lon='longitude', var_lat='latitude',
+ case_name = control_options['domain_name'] ,
+ temp_dir=control_options['intersect_forcing_path']+'/' ,
+ var_names_remapped=['FI', 'FB', 'PR', 'P0', 'TT', 'HU', 'UV']
+ )
+# reindex forcing file generated by EASYMORE to match mesh drainage db
+# first open the mf dataset with some preprocessing to set the lat and lon
+# as coordinates instead of variables.
+def lonlatcoords(ds):
+ try:
+ ds = ds.set_coords(['latitude','longitude'])
+ return ds
+ except:
+ print('not needed')
+input_forcing = xr.open_mfdataset(control_options['forcing_basin_avg_path']+'/*.nc',preprocess=lonlatcoords)
+input_basin = gpd.read_file(control_options['river_basin_shp_path'])
+mesh_forcing = mesh.reindex_forcing_file(input_forcing, drain_db, input_basin)
+# save file
+mesh_forcing.to_netcdf(os.path.join(control_options['settings_mesh_path'],'MESH_input_era5.nc'))
+
+## mesh CLASS.ini file
+deglat = "{:.2f}".format(drain_db.lat.mean().values)
+deglon = "{:.2f}".format(drain_db.lon.mean().values)
+windspeed_ref_height = '40.00'
+temp_humid_ref_height = '40.00'
+surface_roughness_height = '50.00'
+ground_cover_flag = '-1.0'
+ILW = '1'
+n_grid = '51'
+n_GRU = len(drain_db.gru)
+datetime_start = pd.to_datetime(mesh_forcing.time[0].values)
+
+inif = mesh.MeshClassIniFile(os.path.join(control_options['settings_mesh_path'],'MESH_parameters_CLASS.ini'),
+ n_GRU,datetime_start)
+inif.set_header("test_bow_blah","bartus","Canmore")
+inif.set_area_info(deglat,deglon,windspeed_ref_height=40.00,
+ temp_humid_ref_height=40.00, surface_roughness_height=50.00,
+ ground_cover_flag=-1, ILW=1, n_grid=0)
+inif.set_start_end_times(datetime_start)
+inif.write_ini_file()
+
+## Run options
+optf = mesh.MeshRunOptionsIniFile(os.path.join(control_options['settings_mesh_path'],'MESH_input_run_options.ini'),
+ os.path.join(control_options['settings_mesh_path'],'MESH_input_era5.nc'))
+
+# hydrological parameters ini file
+mhi = mesh.MeshHydrologyIniFile(os.path.join(control_options['settings_mesh_path'],'MESH_parameters_hydrology.ini'),
+ n_gru=11)
+
+# reservoir file (txt dummy version)
+resi = mesh.MeshReservoirTxtFile(os.path.join(control_options['settings_mesh_path'],'MESH_input_reservoir.txt'))
+
+# soil layers file (default version)
+sli = mesh.MeshSoilLevelTxtFile(os.path.join(control_options['settings_mesh_path'],'MESH_input_soil_levels.txt'))
+# streamflow.txt file
+ffi = mesh.MeshInputStreamflowTxtFile(os.path.join(control_options['settings_mesh_path'],'MESH_input_streamflow.txt'),
+ forcing_file=mesh_forcing)
+# min_max parameter file
+mmpf = mesh.MeshMinMaxParameterTxtFile(os.path.join(control_options['settings_mesh_path'],'MESH_minmax_parameters.txt'))
+
+# parameter file
+parf = mesh.MeshParameterTxtFile(os.path.join(control_options['settings_mesh_path'],'MESH_parameters.txt'),
+ pd.DataFrame(np.array([['!>','DTMINUSR'],['RTE time-step [s]',300.0]]).transpose()
+ ))
\ No newline at end of file
diff --git a/tests/test_read_summa_workflow_control_file.py b/tests/test_read_summa_workflow_control_file.py
new file mode 100644
index 0000000..bee163d
--- /dev/null
+++ b/tests/test_read_summa_workflow_control_file.py
@@ -0,0 +1,12 @@
+import os
+
+from pathlib import Path
+import shutil
+import sys
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
+import cwarhm.util.util as utl
+
+control_file = '../dependencies/cwarhm-summa/0_control_files/control_Bow_at_Banff.txt'
+control_dict = utl.read_summa_workflow_control_file(control_file,comment_char='#',option_char='|')
+print(control_dict)
\ No newline at end of file
diff --git a/workflows/summa_bowatbanff/control_Bow_at_Banff_test.txt b/workflows/summa_bowatbanff/control_Bow_at_Banff_test.txt
new file mode 100644
index 0000000..285d414
--- /dev/null
+++ b/workflows/summa_bowatbanff/control_Bow_at_Banff_test.txt
@@ -0,0 +1,230 @@
+# SUMMA workflow setting file.
+# Characters '|' and '#' are used as separators to find the actual setting values. Any text behind '|' is assumed to be part of the setting value, unless preceded by '#'.
+
+# Note on path specification
+# If deviating from default paths, a full path must be specified. E.g. '/home/user/non-default/path'
+
+
+# Modeling domain settings
+root_path | /Users/ayx374/Documents/project/chwarm_test_results2 # Root folder where data will be stored.
+domain_name | BowAtBanff # Used as part of the root folder name for the prepared data.
+
+
+# Shapefile settings - SUMMA catchment file
+catchment_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment'.
+catchment_shp_name | bow_distributed_elevation_zone.shp # Name of the catchment shapefile. Requires extension '.shp'.
+catchment_shp_gruid | GRU_ID # Name of the GRU ID column (can be any numeric value, HRU's within a single GRU have the same GRU ID).
+catchment_shp_hruid | HRU_ID # Name of the HRU ID column (consecutive from 1 to total number of HRUs, must be unique).
+catchment_shp_area | HRU_area # Name of the catchment area column. Area must be in units [m^2]
+catchment_shp_lat | center_lat # Name of the latitude column. Should be a value representative for the HRU. Typically the centroid.
+catchment_shp_lon | center_lon # Name of the longitude column. Should be a value representative for the HRU. Typically the centroid.
+
+
+# Shapefile settings - mizuRoute river network file
+river_network_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/river_network'.
+river_network_shp_name | bow_river_network_from_merit_hydro.shp # Name of the river network shapefile. Requires extension '.shp'.
+river_network_shp_segid | COMID # Name of the segment ID column.
+river_network_shp_downsegid | NextDownID # Name of the downstream segment ID column.
+river_network_shp_slope | slope # Name of the slope column. Slope must be in in units [length/length].
+river_network_shp_length | length # Name of the segment length column. Length must be in units [m].
+
+
+# Shapefile settings - mizuRoute catchment file
+river_basin_shp_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/river_basins'.
+river_basin_shp_name | bow_distributed.shp # Name of the routing subbasins shapefile needed for remapping. Requires extension '.shp'.
+river_basin_shp_rm_hruid | COMID # Name of the routing basin ID column.
+river_basin_shp_area | area # Name of the catchment area column. Area must be in units [m^2]
+river_basin_shp_hru_to_seg | hru_to_seg # Name of the column that shows which river segment each HRU connects to.
+
+
+# Shapefile settings - SUMMA-to-mizuRoute
+river_basin_needs_remap | yes # 'no' if routing basins map 1:1 onto model GRUs. 'yes' if river segments span multiple GRUs or if multiple segments are inside a single GRU.
+
+
+# Install settings
+github_summa | https://github.com/CH-Earth/summa # Replace this with the path to your own fork if you forked the repo.
+github_mizuroute | https://github.com/ncar/mizuroute # Replace this with the path to your own fork if you forked the repo.
+install_path_summa | default # If 'default', clones source code into 'root_path/installs/summa'.
+install_path_mizuroute | default # If 'default', clones source code into 'root_path/installs/mizuRoute'.
+exe_name_summa | summa.exe # Name of the compiled executable.
+exe_name_mizuroute | mizuroute.exe # Name of the compiled executable.
+
+
+# Forcing settings
+forcing_raw_time | 2008,2013 # Years to download: Jan-[from],Dec-[to].
+forcing_raw_space | 51.74/-116.55/50.95/-115.52 # Bounding box of the shapefile: lat_max/lon_min/lat_min/lon_max. Will be converted to ERA5 download coordinates in script. Order and use of '/' to separate values is mandatory.
+forcing_time_step_size | 3600 # Size of the forcing time step in [s]. Must be constant.
+forcing_measurement_height | 3 # Reference height for forcing measurements [m].
+forcing_shape_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/forcing'.
+forcing_shape_name | era5_grid.shp # Name of the forcing shapefile. Requires extension '.shp'.
+forcing_shape_lat_name | lat # Name of the latitude field that contains the latitude of ERA5 data points.
+forcing_shape_lon_name | lon # Name of the longitude field that contains the longitude of ERA5 data points.
+forcing_geo_path | default # If 'default', uses 'root_path/domain_[name]/forcing/0_geopotential'.
+forcing_raw_path | default # If 'default', uses 'root_path/domain_[name]/forcing/1_ERA5_raw_data'.
+forcing_merged_path | default # If 'default', uses 'root_path/domain_[name]/forcing/2_merged_data'.
+forcing_easymore_path | default # If 'default', uses 'root_path/domain_[name]/forcing/3_temp_easymore'.
+forcing_basin_avg_path | default # If 'default', uses 'root_path/domain_[name]/forcing/3_basin_averaged_data'.
+forcing_summa_path | default # If 'default', uses 'root_path/domain_[name]/forcing/4_SUMMA_input'.
+
+
+# Parameter settings - DEM
+parameter_dem_main_url | http://hydro.iis.u-tokyo.ac.jp/~yamadai/MERIT_Hydro/distribute/v1.0.1/ # Primary download URL for MERIT Hydro adjusted elevation data. Needs to be appended with filenames.
+parameter_dem_file_template | elv_{}{}.tar # Template for download file names.
+parameter_dem_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/1_MERIT_hydro_raw_data'.
+parameter_dem_unpack_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/2_MERIT_hydro_unpacked_data'.
+parameter_dem_vrt1_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/3_vrt'.
+parameter_dem_vrt2_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/4_domain_vrt'.
+parameter_dem_tif_path | default # If 'default', uses 'root_path/domain_[name]/parameters/dem/5_elevation'.
+parameter_dem_tif_name | elevation.tif # Name of the final DEM for the domain. Must be in .tif format.
+
+
+# Parameter settings - soil
+parameter_soil_hydro_ID | 1361509511e44adfba814f6950c6e742 # ID of the Hydroshare resource to download.
+parameter_soil_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/soilclass/1_soil_classes_global'.
+parameter_soil_domain_path | default # If 'default', uses 'root_path/domain_[name]/parameters/soilclass/2_soil_classes_domain'.
+parameter_soil_tif_name | soil_classes.tif # Name of the final soil class overview for the domain. Must be in .tif format.
+
+
+# Parameter settings - land
+parameter_land_list_path | default # If 'default', uses 'summaWorkflow_public/3b_parameters/MODIS_MCD12Q1_V6/1_download/'. Location of file with data download links.
+parameter_land_list_name | daac_mcd12q1_data_links.txt # Name of file that contains list of MODIS download urls.
+parameter_land_raw_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/1_MODIS_raw_data'.
+parameter_land_vrt1_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/2_vrt_native_crs'. Virtual dataset composed of .hdf files.
+parameter_land_vrt2_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/3_vrt_epsg_4326'. Virtual dataset projected in EPSG:4326.
+parameter_land_vrt3_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/4_domain_vrt_epsg_4326'. Virtual dataset cropped to model domain.
+parameter_land_vrt4_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/5_multiband_domain_vrt_epsg_4326'. Multiband cropped virtual dataset.
+parameter_land_tif_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/6_tif_multiband'.
+parameter_land_mode_path | default # If 'default', uses 'root_path/domain_[name]/parameters/landclass/7_mode_land_class'.
+parameter_land_tif_name | land_classes.tif # Name of the final landclass overview for the domain. Must be in .tif format.
+
+
+# Intersection settings
+intersect_dem_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_dem'.
+intersect_dem_name | catchment_with_merit_dem.shp # Name of the shapefile with intersection between catchment and MERIT Hydro DEM, stored in column 'elev_mean'.
+intersect_soil_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_soilgrids'.
+intersect_soil_name | catchment_with_soilgrids.shp # Name of the shapefile with intersection between catchment and SOILGRIDS-derived USDA soil classes, stored in columns 'USDA_{1,...n}'
+intersect_land_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_modis'.
+intersect_land_name | catchment_with_modis.shp # Name of the shapefile with intersection between catchment and MODIS-derived IGBP land classes, stored in columns 'IGBP_{1,...n}'
+intersect_forcing_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_forcing'.
+intersect_routing_path | default # If 'default', uses 'root_path/domain_[name]/shapefiles/catchment_intersection/with_routing'.
+intersect_routing_name | catchment_with_routing_basins.shp # Name of the shapefile with intersection between hydrologic model catchments and routing model catchments.
+
+
+# Experiment settings - general
+experiment_id | run1 # Descriptor of the modelling experiment; used as output folder name.
+experiment_time_start | default # Simulation start. If 'default', constructs this from 'forcing_raw_time' setting and uses all downloaded forcing data; e.g. '1979-01-01 00:00'.
+experiment_time_end | default # Simulation end. If 'default', constructs this from 'forcing_raw_time' setting and uses all downloaded forcing data; e.g. '1979-12-31 23:00'.
+experiment_output_summa | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/SUMMA'.
+experiment_output_mizuRoute | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/mizuRoute'.
+experiment_log_summa | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/SUMMA/SUMMA_logs'.
+experiment_log_mizuroute | default # If 'default', uses 'root_path/domain_[name]/simulations/[experiment_id]/mizuRoute/mizuRoute_logs'.
+experiment_backup_settings | yes # Flag to (not) create a copy of the model settings in the output folder; "no" or "yes". Copying settings may be undesirable if files are large.
+
+
+# Experiment settings - SUMMA
+settings_summa_path | default # If 'default', uses 'root_path/domain_[name]/settings/SUMMA'.
+settings_summa_filemanager | fileManager.txt # Name of the file with the SUMMA inputs.
+settings_summa_coldstate | coldState.nc # Name of the file with initial states.
+settings_summa_trialParams | trialParams.nc # Name of the file that can contain trial parameter values (note, can be empty of any actual parameter values but must be provided and must contain an 'hruId' variable).
+settings_summa_forcing_list | forcingFileList.txt # Name of the file that has the list of forcing files.
+settings_summa_attributes | attributes.nc # Name of the attributes file.
+settings_summa_connect_HRUs | no # Attribute setting: "no" or "yes". Tricky concept, see README in ./5_model_input/SUMMA/3f_attributes. If no; all HRUs modeled as independent columns (downHRUindex = 0). If yes; HRUs within each GRU are connected based on relative HRU elevation (highest = upstream, lowest = outlet).
+settings_summa_trialParam_n | 1 # Number of trial parameter specifications. Specify 0 if none are wanted (they can still be included in this file but won't be read).
+settings_summa_trialParam_1 | maxstep,900 # Name of trial parameter and value to assign. Value assumed to be float.
+
+
+# Experiment settings - mizuRoute
+settings_mizu_path | default # If 'default', uses 'root_path/domain_[name]/settings/mizuRoute'.
+settings_mizu_parameters | param.nml.default # Name of the routing parameters file.
+settings_mizu_topology | topology.nc # Name of the river network topology file.
+settings_mizu_remap | routing_remap.nc # Name of the optional catchment remapping file, for cases when SUMMA uses different catchments than mizuRoute.
+settings_mizu_control_file | mizuroute.control # Name of the control file.
+settings_mizu_routing_var | averageRoutedRunoff # Name of SUMMA output variable to use for routing.
+settings_mizu_routing_units | m/s # Units of the variable to be routed.
+settings_mizu_routing_dt | 3600 # Size of the routing time step [s].
+settings_mizu_output_freq | annual # Frequency with which mizuRoute generates new output files. Must be one of 'single', 'day', 'month', 'annual'.
+settings_mizu_output_vars | 0 # Routing output. '0' for both KWT and IRF; '1' IRF only; '2' KWT only.
+settings_mizu_within_basin | 0 # '0' (no) or '1' (IRF routing). Flag to enable within-basin routing by mizuRoute. Should be set to 0 if SUMMA is run with "subRouting" decision "timeDlay".
+settings_mizu_make_outlet | 71028585 # Segment ID or IDs that should be set as network outlet. Specify multiple IDs separated by commas: X,Y,Z. Specify no IDs as: n/a. Note that this can also be done in the network shapefile.
+
+
+# Postprocessing settings
+visualization_folder | default # If 'default', uses 'root_path/domain_[name]/visualization'.
+
+
+# Default folder structure
+# Example of the resulting folder structure in "root_path".
+# New domains will go into their own folder.
+
+- summaWorkflow_data
+ |
+ |_ domain_BowAtBanff
+ | |
+ | |_ forcing
+ | | |_ 0_geopotential
+ | | |_ 1_raw_data
+ | | |_ 2_merged_data
+ | | |_ 3_basin_averaged_data
+ | | |_ 4_SUMMA_input
+ | |
+ | |_ parameters
+ | | |_ soilclass
+ | | | |_ 1_soil_classes_global
+ | | | |_ 2_soil_classes_domain
+ | | |
+ | | |_ landclass
+ | | | |_ 1_MODIS_raw_data
+ | | | |_ 2_vrt_native_crs
+ | | | |_ 3_vrt_epsg_4326
+ | | | |_ 4_domain_vrt_epsg_4326
+ | | | |_ 5_multiband_domain_vrt_epsg_4326
+ | | | |_ 6_tif_multiband
+ | | | |_ 7_mode_land_class
+ | | |
+ | | |_ dem
+ | | |_ 1_MERIT_hydro_raw_data
+ | | |_ 2_MERIT_hydro_unpacked_data
+ | | |_ 3_vrt
+ | | |_ 4_domain_vrt
+ | | |_ 5_elevation
+ | |
+ | |_ settings
+ | | |_ mizuRoute
+ | | |_ SUMMA
+ | |
+ | |_ shapefiles
+ | | |_ catchment
+ | | |_ catchment_intersection
+ | | | |_ with_dem
+ | | | |_ with_forcing
+ | | | |_ with_soil
+ | | | |_ with_veg
+ | | |_ forcing
+ | | |_ river_basins
+ | | |_ river_network
+ | |
+ | |_ simulations
+ | | |_run1
+ | | | |_ 0_settings_backup
+ | | | | |_ summa
+ | | | | |_ mizuRoute
+ | | | |_ summa
+ | | | | |_run_settings
+ | | | | |_SUMMA_logs
+ | | | |_ mizuRoute
+ | | | | |_run_settings
+ | | | | |_mizuRoute_logs
+ | | |_run2
+ | | |_ ...
+ | |
+ | |_ visualization
+ |
+ |_ domain_global
+ | |_ ...
+ |
+ |_ domain_northAmerica
+ | |_ ...
+ |
+ |_ installs
+ |_ mizuRoute
+ |_ SUMMA
diff --git a/workflows/summa_bowatbanff/make_folder_structure.py b/workflows/summa_bowatbanff/make_folder_structure.py
new file mode 100644
index 0000000..fa7cd2d
--- /dev/null
+++ b/workflows/summa_bowatbanff/make_folder_structure.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+'''
+SUMMA workflow: make folder structure
+Makes the initial folder structure for a given control file. All other files in the workflow will look for the file `control_active.txt` during their execution. This script:
+
+1. Copies the specified control file into `control_active.txt`;
+2. Prepares a folder structure using the settings in `control_active.txt`.
+3. Creates a copy of itself to be stored in the new folder structure.
+
+The destination folders are referred to as "domain folders".
+'''
+
+# Specify the control file to use
+sourceFile = 'control_Bow_at_Banff_test.txt'
+
+# --- Do not change below this line.
+
+# Modules
+import os
+from pathlib import Path
+from shutil import copyfile
+from datetime import datetime
+
+# --- Copy the control file into `control_active.txt`
+# Easy access to control file folder
+controlFolder = Path('../0_control_files')
+
+# Store the name of the 'active' file in a variable
+controlFile = 'control_active.txt'
+
+# Copy
+copyfile( controlFolder/sourceFile, controlFolder/controlFile );
+
+# --- Create the main domain folders
+# Function to extract a given setting from the control file
+def read_from_control( file, setting ):
+
+ # Open 'control_active.txt' and ...
+ with open(file) as contents:
+ for line in contents:
+
+ # ... find the line with the requested setting
+ if setting in line:
+ break
+
+ # Extract the setting's value
+ substring = line.split('|',1)[1] # Remove the setting's name (split into 2 based on '|', keep only 2nd part)
+ substring = substring.split('#',1)[0] # Remove comments, does nothing if no '#' is found
+ substring = substring.strip() # Remove leading and trailing whitespace, tabs, newlines
+
+ # Return this value
+ return substring
+
+# Find the path where the domain folders need to go
+# Immediately store as a 'Path' to avoid issues with '/' and '\' on different operating systems
+rootPath = Path( read_from_control(controlFolder/controlFile,'root_path') )
+
+# Find the domain name
+domainName = read_from_control(controlFolder/controlFile,'domain_name')
+
+# Create the domain folder inside 'root'
+domainFolder = 'domain_' + domainName
+Path( rootPath / domainFolder ).mkdir(parents=True, exist_ok=True)
+
+
+# --- Make the shapefile folders
+# Find the catchment shapefile folder in 'control_active'
+catchmentShapeFolder = read_from_control(controlFolder/controlFile,'catchment_shp_path')
+networkShapeFolder = read_from_control(controlFolder/controlFile,'river_network_shp_path')
+riverBasinFolder = read_from_control(controlFolder/controlFile,'river_basin_shp_path')
+
+# Specify the default paths if required
+if catchmentShapeFolder == 'default':
+ catchmentShapeFolder = 'shapefiles/catchment'
+if networkShapeFolder == 'default':
+ networkShapeFolder = 'shapefiles/river_network'
+if riverBasinFolder == 'default':
+ riverBasinFolder = 'shapefiles/river_basins'
+
+# Try to make the shapefile folders; does nothing if the folder already exists
+Path( rootPath / domainFolder / catchmentShapeFolder ).mkdir(parents=True, exist_ok=True)
+Path( rootPath / domainFolder / networkShapeFolder ).mkdir(parents=True, exist_ok=True)
+Path( rootPath / domainFolder / riverBasinFolder ).mkdir(parents=True, exist_ok=True)
+
+
+# --- Code provenance
+# Generates a basic log file in the domain folder and copies the control file and itself there.
+# Create a log folder
+logFolder = '_workflow_log'
+Path( rootPath / domainFolder / logFolder ).mkdir(parents=True, exist_ok=True)
+
+# Copy the control file
+copyfile(controlFolder / sourceFile, rootPath / domainFolder / logFolder / sourceFile);
+
+# Copy this script
+thisFile = 'make_folder_structure.py'
+copyfile(thisFile, rootPath / domainFolder / logFolder / thisFile);
+
+# Get current date and time
+now = datetime.now()
+
+# Create a log file
+logFile = now.strftime('%Y%m%d') + '_log.txt'
+with open(rootPath / domainFolder / logFolder / logFile, 'w') as file:
+
+ lines = ['Log generated by ' + thisFile + ' on ' + now.strftime('%Y/%m/%d %H:%M:%S') + '\n',
+ 'Generated folder structure using ' + sourceFile]
+ for txt in lines:
+ file.write(txt)
\ No newline at end of file
diff --git a/workflows/summa_bowatbanff/test_bow_at_banff.py b/workflows/summa_bowatbanff/test_bow_at_banff.py
new file mode 100644
index 0000000..661ac7c
--- /dev/null
+++ b/workflows/summa_bowatbanff/test_bow_at_banff.py
@@ -0,0 +1,126 @@
+import os
+import shutil
+import sys
+
+#%%
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
+from pathlib import Path
+from cwarhm.wrappers import cwarhm_summa as fm
+from cwarhm.model_specific_processing import mizuroute as mzr
+from cwarhm.data_specific_processing import era5
+from cwarhm.data_specific_processing import merit
+import cwarhm.util.util as utl
+
+os.chdir(os.path.dirname(os.path.realpath(__file__)))
+cwarhm_summa_folder = os.path.abspath("../../dependencies/cwarhm-summa")
+
+### SETTINGS
+test_data_path = Path("/Users/localuser/Research/summaWorkflow_data/domain_BowAtBanff")
+# set control file to use
+control_options = utl.read_summa_workflow_control_file('control_Bow_at_Banff_test.txt')
+# copy control file to cwarhm summa folder
+fm.change_control_file_in_submodule(cwarhm_summa_folder, 'control_Bow_at_Banff_test.txt')
+# set path where results go
+results_folder_path = control_options['root_path'] + '/domain_' + control_options['domain_name']
+
+reset_test = False
+if reset_test:
+ ## manage context of test: copy test data to required folder
+ # remove results folder if exists
+ if os.path.exists(results_folder_path):
+ shutil.rmtree(results_folder_path)
+ # create results folder
+ # copy test_data to the results folder, since (by default) new results and previous results share the same root folder
+
+ shutil.copytree(test_data_path, results_folder_path)
+ fm.create_folder_structure(cwarhm_summa_folder)
+
+#%% start example
+#%% download data (downloads not included in example) - data specific input layer - part 1
+## data can be downloaded by the test_data_download.py script.
+
+
+#%% process downloaded data - data specific input layer - part 2
+
+### forcing ERA5 ###
+
+era5.merge_era5_surface_and_pressure_level_downloads(control_options['forcing_raw_path'], control_options['forcing_merged_path'], control_options['forcing_raw_time'])
+# fm.merge_forcing(cwarhm_summa_folder, control_options['forcing_raw_time']) # replaced by functions from era5
+
+fm.create_ERA5_shapefile(cwarhm_summa_folder)
+
+## merit hydro ##
+fm.unpack_merit_hydro(cwarhm_summa_folder)
+fm.create_merit_hydro_virtual_dataset(cwarhm_summa_folder)
+fm.specify_merit_hydro_subdomain(cwarhm_summa_folder)
+fm.convert_merit_hydro_vrt_to_tif(cwarhm_summa_folder)
+
+## MODIS ##
+fm.create_modis_virtual_dataset(cwarhm_summa_folder)
+fm.reproject_modis_virtual_dataset(cwarhm_summa_folder)
+fm.specify_modis_subdomain(cwarhm_summa_folder)
+fm.create_multiband_modis_vrt(cwarhm_summa_folder)
+fm.convert_modis_vrt_to_tif(cwarhm_summa_folder)
+fm.find_mode_modis_landclass(cwarhm_summa_folder)
+
+## SOILGRIDS ##
+fm.extract_soilgrids_domain(cwarhm_summa_folder)
+
+#%% model agnostic mapping layer
+fm.sort_catchment_shape(cwarhm_summa_folder)
+
+fm.find_HRU_elevation(cwarhm_summa_folder)
+fm.find_HRU_land_classes(cwarhm_summa_folder)
+#%%
+fm.find_HRU_soil_classes(cwarhm_summa_folder)
+
+#%%
+fm.make_single_weighted_forcing_file(cwarhm_summa_folder)
+fm.make_all_weighted_forcing_files(cwarhm_summa_folder)
+fm.temperature_lapsing_and_datastep(cwarhm_summa_folder)
+
+#%% Model specific processing layer
+## Build repo clones and compile
+fm.clone_summa_repo(cwarhm_summa_folder)
+fm.clone_mizuroute_repo(cwarhm_summa_folder)
+
+## Compiling needs adjustment for local OS
+##fm.compile_summa(cwarhm_summa_folder)
+##fm.compile_mizuroute(cwarhm_summa_folder)
+
+## mizuRoute ##
+fm.copy_mizuroute_base_settings(cwarhm_summa_folder)
+mzr.generate_mizuroute_topology(control_options['river_network_shp_path'], control_options['river_basin_shp_path'],
+ os.path.join(control_options['settings_mizu_path'],control_options['settings_mizu_topology']),
+ control_options['settings_mizu_make_outlet'])
+fm.create_mizuroute_network_topology_file(cwarhm_summa_folder)
+
+fm.remap_summa_catchments_to_mizurouting(cwarhm_summa_folder)
+fm.create_mizuroute_control_file(cwarhm_summa_folder)
+
+#%%
+## SUMMA ##
+fm.copy_summa_base_settings(cwarhm_summa_folder)
+fm.create_summa_file_manager(cwarhm_summa_folder)
+fm.create_summa_forcing_file_list(cwarhm_summa_folder)
+fm.create_summa_cold_state(cwarhm_summa_folder)
+fm.create_summa_trial_parameters(cwarhm_summa_folder)
+fm.initialize_summa_attributes_nc(cwarhm_summa_folder)
+fm.insert_soilclass_from_hist_into_summa_attributes(cwarhm_summa_folder)
+fm.insert_landclass_from_hist_into_summa_attributes(cwarhm_summa_folder)
+fm.insert_elevation_from_hist_into_summa_attributes(cwarhm_summa_folder)
+
+#%% run models
+## Note that models need to be compiled
+
+#fm.run_summa(cwarhm_summa_folder)
+#fm.run_mizuroute(cwarhm_summa_folder)
+
+#%% evaluate models
+#fm.plot_mizuroute_and_summa_shapefiles(cwarhm_summa_folder)
+#fm.plot_ERA5_download_coordinates_and_catchment_shapefile(cwarhm_summa_folder)
+#fm.plot_geospatial_parameters_to_model_elements(cwarhm_summa_folder)
+#fm.plot_SWE_and_streamflow_per_HRU(cwarhm_summa_folder)
+#fm.plot_forcing_grid_vs_catchment_averaged(cwarhm_summa_folder)
+## fm.plot_SWE_SM_ET_Q_per_GRU(cwarhm_summa_folder) not all data local!
+## fm.plot_temperature_lapse_rates(cwarhm_summa_folder) not all data local!
\ No newline at end of file
diff --git a/workflows/summa_bowatbanff/test_data_downloads.py b/workflows/summa_bowatbanff/test_data_downloads.py
new file mode 100644
index 0000000..81bda31
--- /dev/null
+++ b/workflows/summa_bowatbanff/test_data_downloads.py
@@ -0,0 +1,58 @@
+import os
+from pathlib import Path
+import shutil
+import sys
+
+#%%
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
+from cwarhm.wrappers import cwarhm_summa as fm
+from cwarhm.model_specific_processing import mizuroute as mzr
+from cwarhm.data_specific_processing import era5
+from cwarhm.data_specific_processing import merit
+import cwarhm.util.util as utl
+
+os.chdir(os.path.dirname(os.path.realpath(__file__)))
+cwarhm_summa_folder = os.path.abspath("../../dependencies/cwarhm-summa")
+
+#%% download data (downloads not included in example) - data specific input layer - part 1
+## run the lines below if the test data is not available locally
+
+control_options = utl.read_summa_workflow_control_file('control_Bow_at_Banff_test.txt')
+results_folder_path = control_options['root_path'] + '/domain_' + control_options['domain_name']
+utl.build_folder_structure(control_options)
+
+## ERA5
+bbox = [float(i) for i in control_options['forcing_raw_space'].split('/')]
+#years = utl.unpack_year_range(control_options['forcing_raw_time'])
+years = [2010]
+era_5_raw_data_path = control_options['forcing_raw_path']
+year = years[0]
+request_list, download_paths = era5.generate_download_requests(year,bbox,era_5_raw_data_path,'surface_level')
+era5.wait_for_and_download_requests(request_list,download_paths,sleep=30)
+
+
+## MERIT DEM
+# TODO: check bbox... hopefully the same as for ERA5...
+merit_raw_folder = control_options['parameter_dem_raw_path']
+credentials = utl.read_merit_credentials_file()
+merit.download_merit(merit_raw_folder, credentials, ['elv'], bbox=bbox)
+
+fm.download_modis_mcd12q1_v6(cwarhm_summa_folder)
+fm.download_soilgrids_soilclass_global(cwarhm_summa_folder)
+
+#%% obsolete: test of running downloads in parallel
+#def main():
+# control_options = utl.read_summa_workflow_control_file('control_Bow_at_Banff_test.txt')
+# utl.build_folder_structure(control_options)
+# ## ERA5
+# bbox = [float(i) for i in control_options['forcing_raw_space'].split('/')]
+# #years = utl.unpack_year_range(control_options['forcing_raw_time'])
+# years = [2010]
+# era_5_raw_data_path = control_options['forcing_raw_path']
+# #era5.run_era5_download_in_parallel(years,bbox,era_5_raw_data_path,'surface_level')
+# #era5.run_era5_download_in_parallel(years,bbox,era_5_raw_data_path,'pressure_level')
+
+#if __name__ == '__main__':
+# main()
+
+
diff --git a/workflows/summa_bowatbanff/test_merithydro.py b/workflows/summa_bowatbanff/test_merithydro.py
new file mode 100644
index 0000000..091bf66
--- /dev/null
+++ b/workflows/summa_bowatbanff/test_merithydro.py
@@ -0,0 +1,58 @@
+import os
+
+from pathlib import Path
+import shutil
+import sys
+
+#%%
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
+import cwarhm.util.util as utl
+from cwarhm.wrappers import cwarhm_summa as fm
+import cwarhm.data_specific_processing.merit as merit
+
+os.chdir(os.path.dirname(os.path.realpath(__file__)))
+cwarhm_summa_folder = "/Users/ayx374/Documents/GitHub/forks/summaWorkflow_public/dependencies/cwarhm-summa"
+#cwarhm_summa_folder = "./dependencies/cwarhm-summa"
+results_folder_path = Path("/Users/ayx374/Documents/project/chwarm_test_results/domain_BowAtBanff")
+test_data_path = Path("/Users/ayx374/Documents/project/chwarm_test_data/domain_BowAtBanff")
+
+# set control file to use with wrappers
+fm.change_control_file_in_submodule(cwarhm_summa_folder, 'control_Bow_at_Banff_test.txt')
+# read control file to use with functions
+control_options = utl.read_summa_workflow_control_file('/Users/ayx374/Documents/GitHub/forks/summaWorkflow_public/dependencies/cwarhm-summa/0_control_files/control_Bow_at_Banff_test.txt')
+
+reset_test = False
+if reset_test:
+ ## manage context of test: copy test data to required folder
+ # remove results folder if exists
+ if os.path.exists(results_folder_path):
+ shutil.rmtree(results_folder_path)
+ # create results folder
+ # copy test_data to results folder, since in cwarhm results and previous results share the same root folder (by default)
+
+ shutil.copytree(test_data_path, results_folder_path)
+ fm.create_folder_structure(cwarhm_summa_folder)
+
+#%% start example
+
+#%% download data (downloads not included in example) - data specific input layer - part 1
+## run the lines below if the test data is not available locally
+
+## fm.run_download_ERA5_pressureLevel_paralell(cwarhm_summa_folder)
+## fm.run_download_ERA5_surfaceLevel_paralell(cwarhm_summa_folder)
+## fm.download_merit_hydro_adjusted_elevation(cwarhm_summa_folder)
+## fm.download_modis_mcd12q1_v6(cwarhm_summa_folder)
+## fm.download_soilgrids_soilclass_global(cwarhm_summa_folder)
+
+#%% process downloaded data - data specific input layer - part 2
+
+### forcing ERA5 ###
+#fm.merge_forcing(cwarhm_summa_folder)
+#fm.create_ERA5_shapefile(cwarhm_summa_folder)
+
+## merit hydro ##
+merit.extract_merit_tars(control_options)
+fm.unpack_merit_hydro(cwarhm_summa_folder)
+fm.create_merit_hydro_virtual_dataset(cwarhm_summa_folder)
+fm.specify_merit_hydro_subdomain(cwarhm_summa_folder)
+fm.convert_merit_hydro_vrt_to_tif(cwarhm_summa_folder)
\ No newline at end of file