forked from ggrill/Free-Flowing-Rivers
-
Notifications
You must be signed in to change notification settings - Fork 0
/
fra_start.py
188 lines (136 loc) · 5.52 KB
/
fra_start.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
"""
Launching script that cycles through a set of tasks as defined in the sheet 'START' in the Excel
workbook 'confix.xlsx'
The script prepares an analysis environment by creating a output directory for the specific
model run with a name using the current date.
It backups the model code into the directory and creates output geodatabases, and sub-folders for
statistics and an excel sheet that stores results
"""
import datetime
import logging
import os
import shutil
import sys
import tools.helper as tools
from config import config
fd = config.var
from scripts import ffr_run_dof, ffr_run_sed, ffr_run_dor, ffr_run_csi
def start(stamp, sequence, para, scenarios, st_flds, paths):
    """
    Starts the assessment and executes the scripts indicated in the Excel file

    :param stamp: time stamp
    :param sequence: dict mapping task keys ("run_dof", "run_dor", "run_sed",
        "run_csi") to "YES"/"NO" flags indicating whether each script runs
    :param para: dictionary of parameters from Excel file
    :param scenarios: list of scenarios from Excel file
    :param st_flds: required fields in stream database
    :param paths: dictionary of paths as defined by setup function
    :return: None (exits the process with status 0 if nothing is selected)
    """
    # BUG FIX: the original compared the dict `sequence` against the list
    # ["NO", "NO", "NO", "NO"], which can never be equal, so the
    # "nothing to run" early exit was unreachable. Check the values instead.
    if all(flag == "NO" for flag in sequence.values()):
        print("Nothing to run. Exit!")
        sys.exit(0)

    def _banner(title):
        # Print a framed section header to console and log (via prt).
        prt('\n' + "*********")
        prt(title)
        prt("*********" + '\n')

    if sequence["run_dof"] == "YES":
        _banner("RUN DOF")
        ffr_run_dof.run_dof(stamp, para, paths)

    if sequence["run_dor"] == "YES":
        _banner("RUN DOR")
        ffr_run_dor.run_dor(stamp, para, paths)

    if sequence["run_sed"] == "YES":
        _banner("RUN SED")
        ffr_run_sed.run_sed(para, paths)

    if sequence["run_csi"] == "YES":
        _banner("RUN CSI")
        ffr_run_csi.run_csi(stamp, para, scenarios, st_flds, paths)
def setup(base, out, xls_full, stamp, xls_file_name):
    """
    Set up the assessment output environment.

    Creates the CSI geodatabase, statistics/pickle sub-folders, copies the
    configuration workbook, the code directory, and this launcher script into
    the output folder for documentation, then initializes logging and the
    results Excel writer.

    :param base: local path of assessment from where the script is launched
    :param out: output folder
    :param xls_full: full path and file name of config.xlsx file
    :param stamp: time stamp
    :param xls_file_name: file name of excel file
    :return: dictionary with the Excel writer/file and output folder paths
    """
    # Create CSI geodatabase
    gdb_name = "CSI"
    gdb_full_path, gdb_file_name = tools.create_gdb(out, gdb_name)

    # Make statistics folder, where excel files are stored
    sta_folder = os.path.join(out, "STAT")
    tools.create_path(sta_folder)

    # Make temp csi statistics folder
    sta_csi_folder = os.path.join(out, "STATS_CSI")
    tools.create_path(sta_csi_folder)

    # Make temp pickle statistics folder
    sta_pickle_folder = os.path.join(out, "STATS_PICKLES")
    tools.create_path(sta_pickle_folder)

    # Make test_data pickle folder
    test_pickle_folder = os.path.join(out, "TEST_PICKLES")
    tools.create_path(test_pickle_folder)

    # Copy Excel-file into results folder for documentation
    # (os.path.join instead of manual "\\" concatenation for portability)
    shutil.copy(xls_full, os.path.join(out, xls_file_name))

    cde_folder = os.path.join(out, "CODE")
    tools.create_path(cde_folder)
    # Copy entire code directory into output folder for reference
    tools.copytree(src=base, dst=cde_folder)

    # Copy this launcher script into results folder for documentation.
    # BUG FIX: the original copied the Excel workbook (xls_full) a second
    # time under the script's name instead of copying the script itself.
    python_code = os.path.abspath(__file__)
    tail = os.path.basename(python_code)
    shutil.copy(python_code, os.path.join(out, tail))

    # Setup logging
    tools.setup_logging(out)

    # Create Excel writer and the sheet that collects results
    writer, excel_file = tools.get_writer(sta_folder, stamp)
    tools.create_results_sheet(writer)

    paths = {"writer": writer,
             "excel_file": excel_file,
             "gdb_full_path": gdb_full_path,
             "sta_csi_folder": sta_csi_folder,
             "sta_pickle_folder": sta_pickle_folder,
             "test_pickle_folder": test_pickle_folder}
    return paths
def prt(txt):
    """Write *txt* to the log at INFO level and echo it to stdout."""
    for emit in (logging.info, print):
        emit(txt)
def main():
    """Entry point: load the Excel config, set up output folders, run model."""
    time_stamp = tools.get_stamp()
    print("Starting model at {}".format(datetime.datetime.now()))

    path = os.path.realpath(__file__)
    basepath, filename = os.path.split(path)

    # BUG FIX: the original built the path as basepath + r"\\config", which
    # (being a raw string) appends TWO literal backslashes and is
    # Windows-only; os.path.join is correct and portable.
    xls_full_path = os.path.join(basepath, "config", "config.xlsx")
    xls_path, xls_filename = os.path.split(xls_full_path)

    # Loading model parameters from EXCEL file
    sequence, para, scenarios, fields = tools.load_parameters(xls_full_path)

    # Create a time-stamped folder under "output_folder"
    output_folder = os.path.join(para["output_folder"],
                                 "Results_" + time_stamp)

    # These fields must be present in source dataset feature class
    st_flds = [fd.REACH_ID, fd.GOID, fd.NOID, fd.NUOID, fd.NDOID,
               fd.CON_ID, fd.BAS_ID, fd.BAS_NAME, fd.RIV_ORD,
               fd.DIS_AV_CMS, fd.LENGTH_KM,
               fd.VOLUME_TCM, fd.ERO_YLD_TON, fd.HYFALL, fd.BB_ID, fd.BB_NAME,
               fd.BB_OCEAN,
               fd.BB_LEN_KM, fd.BB_VOL_TCM, fd.BB_DIS_ORD, fd.INC
               ] + fields + [fd.FLD]

    # Only the storage-volume field is required on the dams feature class
    ba_flds = [para["svol_field"]]

    # Fail early if the source datasets are missing required fields
    tools.check_fields(para["streams_fc"], st_flds)
    tools.check_fields(para["dams_fc"], ba_flds)

    # 1 Setting up file structure
    paths = setup(base=basepath,
                  out=output_folder,
                  xls_full=xls_full_path,
                  stamp=time_stamp,
                  xls_file_name=xls_filename)

    prt("Results will be in: " + str(output_folder))

    start(time_stamp, sequence, para, scenarios, st_flds, paths)
# ----- ## ----- ## ----- ## ----- ## ----- ## ----- ## ----- ## ----- ##
if __name__ == '__main__':
    # Run the full assessment only when executed as a script (not on import).
    main()