Merged
27 changes: 20 additions & 7 deletions cipher_parse/cipher_input.py
@@ -333,6 +333,7 @@ def from_voronoi(
num_phases=None,
random_seed=None,
is_periodic=False,
combine_phases=None,
):
geometry = CIPHERGeometry.from_voronoi(
num_phases=num_phases,
@@ -343,6 +344,7 @@ def from_voronoi(
size=size,
random_seed=random_seed,
is_periodic=is_periodic,
combine_phases=combine_phases,
)

inp = cls(
@@ -366,6 +368,7 @@ def from_seed_voronoi(
solution_parameters,
random_seed=None,
is_periodic=False,
combine_phases=None,
):
return cls.from_voronoi(
seeds=seeds,
@@ -378,6 +381,7 @@ def from_seed_voronoi(
solution_parameters=solution_parameters,
random_seed=random_seed,
is_periodic=is_periodic,
combine_phases=combine_phases,
)

@classmethod
@@ -393,6 +397,7 @@ def from_random_voronoi(
solution_parameters,
random_seed=None,
is_periodic=False,
combine_phases=None,
):
return cls.from_voronoi(
num_phases=num_phases,
@@ -405,6 +410,7 @@ def from_random_voronoi(
solution_parameters=solution_parameters,
random_seed=random_seed,
is_periodic=is_periodic,
combine_phases=combine_phases,
)

@classmethod
@@ -418,13 +424,15 @@ def from_voxel_phase_map(
outputs,
solution_parameters,
random_seed=None,
combine_phases=None,
):
geometry = CIPHERGeometry(
voxel_phase=voxel_phase,
materials=materials,
interfaces=interfaces,
size=size,
random_seed=random_seed,
combine_phases=combine_phases,
)
inp = cls(
geometry=geometry,
@@ -445,6 +453,7 @@ def from_dream3D(
solution_parameters,
container_labels=None,
phase_type_map=None,
combine_phases=None,
):
default_container_labels = {
"SyntheticVolumeDataContainer": "SyntheticVolumeDataContainer",
@@ -559,6 +568,7 @@ def from_dream3D(
components=components,
outputs=outputs,
solution_parameters=solution_parameters,
combine_phases=combine_phases,
)

@property
@@ -604,14 +614,17 @@ def write_yaml(self, path, separate_mappings=False):

self.geometry._validate_interface_map()

-        phase_mat_str = compress_1D_array_string(self.geometry.phase_material + 1) + "\n"
-        vox_phase_str = (
-            compress_1D_array_string(self.geometry.voxel_phase.flatten(order="F") + 1)
-            + "\n"
-        )
-        int_str = (
-            compress_1D_array_string(self.geometry.interface_map_int.flatten() + 1) + "\n"
-        )
+        phase_mat_str = compress_1D_array_string(self.geometry.phase_material + 1)
+        vox_phase_str = compress_1D_array_string(
+            self.geometry.voxel_phase.flatten(order="F") + 1
+        )
+        int_str = compress_1D_array_string(self.geometry.interface_map_int.flatten() + 1)
+
+        if not separate_mappings:
+            # CIPHER does not like trailing new lines in the mapping text files:
+            phase_mat_str += "\n"
+            vox_phase_str += "\n"
+            int_str += "\n"

if separate_mappings:
phase_mat_map = "phase_material_mapping.txt"
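For reference, a minimal self-contained sketch of the trailing-newline behaviour introduced in `write_yaml` above. `build_mapping_string` and the simplified `compress_1D_array_string` below are illustrative stand-ins, not part of cipher_parse, and the new `combine_phases` argument is not shown because it is simply forwarded to `CIPHERGeometry`:

```python
def compress_1D_array_string(arr):
    # Illustrative stand-in: the real cipher_parse helper produces a compressed
    # representation of the array; the exact format is irrelevant here.
    return " ".join(str(i) for i in arr)


def build_mapping_string(phase_material, separate_mappings=False):
    """Hypothetical helper mirroring the new write_yaml newline handling."""
    phase_mat_str = compress_1D_array_string(phase_material)
    if not separate_mappings:
        # Mapping embedded in the main YAML file: keep a trailing newline.
        phase_mat_str += "\n"
    # When the mapping goes to a separate text file, CIPHER does not like a
    # trailing newline, so nothing is appended in that case.
    return phase_mat_str


print(repr(build_mapping_string([1, 2, 3], separate_mappings=False)))  # '1 2 3\n'
print(repr(build_mapping_string([1, 2, 3], separate_mappings=True)))   # '1 2 3'
```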
84 changes: 65 additions & 19 deletions cipher_parse/cipher_output.py
@@ -147,7 +147,7 @@ def __init__(
self._cipher_stdout = None
self._geometries = None # assigned by set_geometries

-        for idx, i in enumerate(options["save_outputs"]):
+        for idx, i in enumerate(options["save_outputs"] or ()):
if i.get("number") is not None and i.get("time_interval") is not None:
raise ValueError(
f"Specify at most one of 'number' and 'time_interval' for save "
@@ -219,6 +219,7 @@ def get_incremental_data(self):

inp_dat = self.get_input_YAML_data()
grid_size = inp_dat["grid_size"]
grid_size_3D = grid_size if len(grid_size) == 3 else [*grid_size, 1]

outfile_base = inp_dat["solution_parameters"]["outfile"]
output_lookup = {
@@ -232,7 +233,7 @@

# get which files to include for each output/derived output
outputs_keep_idx = {}
-        for save_out_i in self.options["save_outputs"]:
+        for save_out_i in self.options["save_outputs"] or ():
if "number" in save_out_i:
keep_idx = get_subset_indices(len(vtu_file_list), save_out_i["number"])
elif "time_interval" in save_out_i:
@@ -246,15 +247,28 @@
incremental_data = []
for file_i_idx, file_i in enumerate(vtu_file_list):
print(f"Reading VTU file {file_i.name}...", flush=True)
-            mesh = pv.get_reader(file_i).read()
+            try:
+                mesh = pv.get_reader(file_i).read()
+            except Exception:
+                print(f"Failed to read VTU file {file_i.name}.", flush=True)
+                continue
vtu_file_name = file_i.name

-            img_data = pv.ImageData(dimensions=grid_size)
+            img_data = pv.ImageData(dimensions=grid_size_3D)

print(
f"Resampling VTU file {file_i.name} onto an image-data mesh...",
flush=True,
)
-            img_mesh = img_data.sample(mesh)
+            try:
+                img_mesh = img_data.sample(mesh)
+            except Exception:
+                print(
+                    f"Failed to re-sample VTU file {file_i.name} onto an image "
+                    f"data grid.",
+                    flush=True,
+                )
+                continue

inc_data_i = {
"increment": int(re.search(r"\d+", vtu_file_name).group()),
@@ -267,30 +281,52 @@

standard_outputs = {}
for name in output_lookup:
-                arr_flat = img_mesh.get_array(output_lookup[name])
+                try:
+                    arr_flat = img_mesh.get_array(output_lookup[name])
+                except KeyError:
+                    print(
+                        f"Failed to get array {output_lookup[name]} from file "
+                        f"{file_i.name}",
+                        flush=True,
+                    )
+                    continue
arr = arr_flat.reshape(img_mesh.dimensions, order="F")
if name in STANDARD_OUTPUTS_TYPES:
arr = arr.astype(STANDARD_OUTPUTS_TYPES[name])
standard_outputs[name] = np.array(arr) # convert from pyvista_ndarray

derived_outputs = {}
-            for derive_out_i in self.options["derive_outputs"]:
+            for derive_out_i in self.options["derive_outputs"] or ():
name_i = derive_out_i["name"]
func = DERIVED_OUTPUTS_FUNCS[name_i]
func_args = {"input_data": inp_dat}
-                func_args.update(
-                    {i: standard_outputs[i] for i in DERIVED_OUTPUTS_REQUIREMENTS[name_i]}
-                )
+                try:
+                    func_args.update(
+                        {
+                            i: standard_outputs[i]
+                            for i in DERIVED_OUTPUTS_REQUIREMENTS[name_i]
+                        }
+                    )
+                except KeyError:
+                    print(
+                        f"Failed to prepare arguments for derived output function "
+                        f"{func.__name__!r}.",
+                        flush=True,
+                    )
+                    continue
derived_outputs[name_i] = func(**func_args)

for out_name, keep_idx in outputs_keep_idx.items():
if file_i_idx in keep_idx:
-                if out_name in DERIVED_OUTPUTS_REQUIREMENTS:
-                    # a derived output:
-                    inc_data_i[out_name] = derived_outputs[out_name]
-                else:
-                    # a standard output:
-                    inc_data_i[out_name] = standard_outputs[out_name]
+                try:
+                    if out_name in DERIVED_OUTPUTS_REQUIREMENTS:
+                        # a derived output:
+                        inc_data_i[out_name] = derived_outputs[out_name]
+                    else:
+                        # a standard output:
+                        inc_data_i[out_name] = standard_outputs[out_name]
+                except KeyError:
+                    continue

incremental_data.append(inc_data_i)
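A condensed, self-contained sketch of the defensive reading pattern used above, assuming pyvista is installed; the function name `sample_vtu_files` and its return value are illustrative only, not part of cipher_parse:

```python
from pathlib import Path

import pyvista as pv


def sample_vtu_files(vtu_file_list, grid_size):
    """Resample each VTU file onto an image-data grid, skipping files that
    cannot be read or resampled instead of aborting the whole run."""
    # pv.ImageData expects three dimensions, so pad a 2D grid size with a 1:
    grid_size_3D = grid_size if len(grid_size) == 3 else [*grid_size, 1]
    sampled = []
    for file_i in vtu_file_list:
        try:
            mesh = pv.get_reader(file_i).read()
        except Exception:
            print(f"Failed to read VTU file {Path(file_i).name}.", flush=True)
            continue
        img_data = pv.ImageData(dimensions=grid_size_3D)
        try:
            sampled.append(img_data.sample(mesh))
        except Exception:
            print(f"Failed to re-sample VTU file {Path(file_i).name}.", flush=True)
            continue
    return sampled
```

With `save_outputs` and `derive_outputs` possibly unset, the loops above also guard the option lists with `or ()` so that a `None` value simply means there is nothing to iterate over.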

@@ -434,7 +470,7 @@ def from_JSON_file(cls, path):
data = json.load(fp)
return cls.from_JSON(data)

-    def to_zarr(self, path):
+    def to_zarr(self, path, overwrite=False, close_store=None):
"""Save to a persistent zarr store.

This does not yet save `geometries`.
@@ -455,18 +491,28 @@ def to_zarr(self, path):
out_group.create_dataset(
name="stdout_file_str",
data=self.stdout_file_str.splitlines(),
overwrite=overwrite,
)
out_group.create_dataset(
name="input_YAML_file_str",
data=self.input_YAML_file_str.splitlines(),
overwrite=overwrite,
)
inc_dat_group = out_group.create_group("incremental_data", overwrite=True)
for idx, inc_dat_i in enumerate(self.incremental_data):
inc_dat_i_group = inc_dat_group.create_group(f"{idx}")
inc_dat_i_group.attrs.put({k: inc_dat_i[k] for k in INC_DATA_NON_ARRAYS})
for k in inc_dat_i:
if k not in INC_DATA_NON_ARRAYS:
-                    inc_dat_i_group.create_dataset(name=k, data=inc_dat_i[k])
+                    inc_dat_i_group.create_dataset(
+                        name=k, data=inc_dat_i[k], overwrite=overwrite
+                    )

if path.endswith(".zip") and close_store is None:
close_store = True

if close_store:
out_group.store.close()

return out_group
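A hypothetical usage sketch of the new `to_zarr` keywords; `output` stands for an existing instance of the output class defined in this file (assumed here to be `CIPHEROutput`), and the paths are placeholders:

```python
# Overwrite datasets left by a previous call instead of failing:
group = output.to_zarr("results.zarr", overwrite=True)

# For a path ending in ".zip", close_store defaults to True so the zip store
# is closed (and hence finalised) after writing:
output.to_zarr("results.zarr.zip", overwrite=True)

# Any other store can still be closed explicitly if desired:
output.to_zarr("results_run2.zarr", close_store=True)
```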

@@ -477,7 +523,7 @@ def from_zarr(cls, path, cipher_input=None, quiet=True):
This does not yet load `geometries`.

"""
-        group = zarr.open_group(store=path)
+        group = zarr.open_group(store=path, mode="r")
attrs = group.attrs.asdict()
kwargs = {
"directory": attrs["directory"],
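And the corresponding load, with `from_zarr` now opening the store read-only (`mode="r"`) so that loading cannot accidentally modify it; the class name and import path in this sketch are assumed from the file name:

```python
from cipher_parse.cipher_output import CIPHEROutput

loaded = CIPHEROutput.from_zarr("results.zarr")  # cipher_input=None, quiet=True by default
```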