@@ -63,14 +63,14 @@ def update_vds_paths(dset, modify_function):
6363
6464
6565def make_virtual_snapshot (
66- snapshot , auxilary_snapshots , output_file , absolute_paths = False
66+ snapshot , auxiliary_snapshots , output_file , absolute_paths = False
6767):
6868 """
69- Given a snapshot and auxilary files, create
69+ Given a snapshot and auxiliary files, create
7070 a new virtual snapshot with all datasets combined.
7171
7272 snapshot: Path to the snapshot file
73- auxilary_snapshots : List of auxiliary file patterns
73+ auxiliary_snapshots : List of auxiliary file patterns
7474 output_file: Path to the output virtual snapshot
7575 absolute_paths: If True, use absolute paths; if False, use relative paths
7676 """
@@ -83,19 +83,19 @@ def make_virtual_snapshot(
8383
8484 # Calculate directories for path updates
8585 abs_snapshot_dir = os .path .abspath (os .path .dirname (snapshot ))
86- abs_auxilary_dirs = [
86+ abs_auxiliary_dirs = [
8787 os .path .abspath (os .path .dirname (aux .format (file_nr = 0 )))
88- for aux in auxilary_snapshots
88+ for aux in auxiliary_snapshots
8989 ]
9090 abs_output_dir = os .path .abspath (os .path .dirname (output_file ))
9191
9292 if absolute_paths :
9393 snapshot_dir = abs_snapshot_dir
94- auxilary_dirs = abs_auxilary_dirs
94+ auxiliary_dirs = abs_auxiliary_dirs
9595 else :
9696 snapshot_dir = os .path .relpath (abs_snapshot_dir , abs_output_dir )
97- auxilary_dirs = [
98- os .path .relpath (aux_dir , abs_output_dir ) for aux_dir in abs_auxilary_dirs
97+ auxiliary_dirs = [
98+ os .path .relpath (aux_dir , abs_output_dir ) for aux_dir in abs_auxiliary_dirs
9999 ]
100100
101101 # Create path replacement functions
@@ -107,15 +107,15 @@ def replace_path(old_path):
107107 return replace_path
108108
109109 replace_snapshot_path = make_replace_path (snapshot_dir )
110- auxilary_path_replacers = [make_replace_path (d ) for d in auxilary_dirs ]
110+ auxiliary_path_replacers = [make_replace_path (d ) for d in auxiliary_dirs ]
111111
112- all_auxilary_datasets = {}
112+ all_auxiliary_datasets = {}
113113
114- for aux_index , auxilary in enumerate (auxilary_snapshots ):
114+ for aux_index , auxiliary in enumerate (auxiliary_snapshots ):
115115
116- # Check which datasets exist in the auxilary files
116+ # Check which datasets exist in the auxiliary files
117117 # and store their attributes and datatype
118- filename = auxilary .format (file_nr = 0 )
118+ filename = auxiliary .format (file_nr = 0 )
119119 dset_attrs = {}
120120 dset_dtype = {}
121121 with h5py .File (filename , "r" ) as infile :
@@ -128,35 +128,35 @@ def replace_path(old_path):
128128 attrs = dict (infile [f"PartType{ ptype } /{ dset } " ].attrs )
129129 dtype = infile [f"PartType{ ptype } /{ dset } " ].dtype
130130
131- # Some auxilary files are missing these attributes
131+ # Some auxiliary files are missing these attributes
132132 if not "Value stored as physical" in attrs :
133133 print (f"Setting comoving attrs for PartType{ ptype } /{ dset } " )
134134 attrs ["Value stored as physical" ] = [1 ]
135135 attrs ["Property can be converted to comoving" ] = [0 ]
136136
137- # Add a flag that these datasets are stored in the auxilary files
138- attrs ["Auxilary file" ] = [1 ]
137+ # Add a flag that these datasets are stored in the auxiliary files
138+ attrs ["auxiliary file" ] = [1 ]
139139
140140 # Store the values we need for later
141141 dset_attrs [f"PartType{ ptype } " ][dset ] = attrs
142142 dset_dtype [f"PartType{ ptype } " ][dset ] = dtype
143143
144- # Check we don't have this dataset in any of the other auxilary files
144+ # Check we don't have this dataset in any of the other auxiliary files
145145 dset_path = f"PartType{ ptype } /{ dset } "
146- if dset_path in all_auxilary_datasets :
147- other_file = all_auxilary_datasets [f"PartType{ ptype } /{ dset } " ]
146+ if dset_path in all_auxiliary_datasets :
147+ other_file = all_auxiliary_datasets [f"PartType{ ptype } /{ dset } " ]
148148 raise ValueError (
149- f"{ dset_path } is in { auxilary } and { other_file } "
149+ f"{ dset_path } is in { auxiliary } and { other_file } "
150150 )
151- all_auxilary_datasets [dset_path ] = auxilary
151+ all_auxiliary_datasets [dset_path ] = auxiliary
152152
153- # Loop over input auxilary files to get dataset shapes
153+ # Loop over input auxiliary files to get dataset shapes
154154 file_nr = 0
155155 filenames = []
156156 shapes = []
157157 counts = []
158158 while True :
159- filename = auxilary .format (file_nr = file_nr )
159+ filename = auxiliary .format (file_nr = file_nr )
160160 if os .path .exists (filename ):
161161 filenames .append (filename )
162162 with h5py .File (filename , "r" ) as infile :
@@ -178,7 +178,7 @@ def replace_path(old_path):
178178 break
179179 file_nr += 1
180180 if file_nr == 0 :
181- raise IOError (f"Failed to find files matching: { auxilary } " )
181+ raise IOError (f"Failed to find files matching: { auxiliary } " )
182182
183183 # Loop over particle types in the output
184184 for ptype in range (7 ):
@@ -223,7 +223,7 @@ def replace_path(old_path):
223223 # Update paths for this newly created auxiliary dataset
224224 update_vds_paths (
225225 outfile [f"PartType{ ptype } /{ dset } " ],
226- auxilary_path_replacers [aux_index ],
226+ auxiliary_path_replacers [aux_index ],
227227 )
228228
229229 # Copy GroupNr_bound to HaloCatalogueIndex, since
@@ -240,7 +240,7 @@ def replace_path(old_path):
240240 # Update paths for HaloCatalogueIndex too
241241 update_vds_paths (
242242 outfile [f"PartType{ ptype } /HaloCatalogueIndex" ],
243- auxilary_path_replacers [aux_index ],
243+ auxiliary_path_replacers [aux_index ],
244244 )
245245
246246 # Update paths for all original snapshot datasets
@@ -251,7 +251,7 @@ def replace_path(old_path):
251251 dset = outfile [f"{ ptype_name } /{ dset_name } " ]
252252 if dset .is_virtual :
253253 # Check if this is an auxiliary dataset (skip those, already handled)
254- if dset .attrs .get ("Auxilary file" , [0 ])[0 ] != 1 :
254+ if dset .attrs .get ("auxiliary file" , [0 ])[0 ] != 1 :
255255 # This is an original snapshot dataset
256256 update_vds_paths (dset , replace_snapshot_path )
257257
@@ -266,7 +266,7 @@ def replace_path(old_path):
266266 # For description of parameters run the following: $ python make_virtual_snapshot.py --help
267267 parser = argparse .ArgumentParser (
268268 description = (
269- "Link SWIFT snapshots with SWIFT auxilary snapshots (snapshot-like"
269+ "Link SWIFT snapshots with SWIFT auxiliary snapshots (snapshot-like"
270270 "files with the same number of particles in the same order as the"
271271 "snapshot, but with less metadata), such as the SOAP memberships"
272272 )
@@ -278,11 +278,11 @@ def replace_path(old_path):
278278 help = "Name of the SWIFT virtual snapshot file, e.g. snapshot_{snap_nr:04}.hdf5" ,
279279 )
280280 parser .add_argument (
281- "--auxilary -snapshots" ,
281+ "--auxiliary -snapshots" ,
282282 type = str ,
283283 nargs = "+" ,
284284 required = True ,
285- help = "One of more format strings for auxilary files, e.g. membership_{snap_nr:04}.{file_nr}.hdf5" ,
285+ help = "One or more format strings for auxiliary files, e.g. membership_{snap_nr:04}.{file_nr}.hdf5" ,
286286 )
287287 parser .add_argument (
288288 "--output-file" ,
@@ -312,16 +312,16 @@ def replace_path(old_path):
312312 virtual_snapshot = args .virtual_snapshot .format (snap_nr = args .snap_nr )
313313 output_file = args .output_file .format (snap_nr = args .snap_nr )
314314
315- # We don't want to replace {file_nr} for auxilary snapshots
316- auxilary_snapshots = [
315+ # We don't want to replace {file_nr} for auxiliary snapshots
316+ auxiliary_snapshots = [
317317 filename .format_map (SafeDict ({"snap_nr" : args .snap_nr }))
318- for filename in args .auxilary_snapshots
318+ for filename in args .auxiliary_snapshots
319319 ]
320320
321321 # Make a new virtual snapshot with group info
322322 make_virtual_snapshot (
323323 virtual_snapshot ,
324- auxilary_snapshots ,
324+ auxiliary_snapshots ,
325325 output_file ,
326326 absolute_paths = args .absolute_paths ,
327327 )