diff --git a/docs/source/creating_initial_conditions/index.rst b/docs/source/creating_initial_conditions/index.rst index 5d034be7..eed1609f 100644 --- a/docs/source/creating_initial_conditions/index.rst +++ b/docs/source/creating_initial_conditions/index.rst @@ -7,94 +7,113 @@ different set of internal units (specified in your parameter file) that does not necessarily need to be the same set of units that initial conditions are specified in. Nevertheless, it is important to ensure that units in the initial conditions are all *consistent* with each other. To facilitate this, -we use :mod:`unyt` arrays. The below example generates randomly placed gas -particles with uniform densities. +we use :class:`~swiftsimio.objects.cosmo_array` data. The below example +generates randomly placed gas particles with uniform densities. The functionality to create initial conditions is available through -the :mod:`swiftsimio.writer` sub-module, and the top-level -:obj:`swiftsimio.Writer` object. +the :mod:`swiftsimio.snapshot_writer` sub-module, and the top-level +:class:`~swiftsimio.Writer` class. -Note that the properties that :mod:`swiftsimio` requires in the initial -conditions are the only ones that are actually read by SWIFT; other fields -will be left un-read and as such should not be included in initial conditions -files. +.. warning:: -A current known issue is that due to inconsistencies with the initial -conditions and simulation snapshots, :mod:`swiftsimio` is not actually able -to read the inititial conditions that it produces. We are aiming to fix this -in an upcoming release. + The properties that :mod:`swiftsimio` requires in the initial + conditions are the only ones that are actually read by SWIFT; other fields + will be left un-read and as such cannot be included in initial conditions + files using the :class:`~swiftsimio.Writer`. Any additional + attributes set will be silently ignored. +.. 
warning::
+
+    You need to be careful that your choice of unit system does
+    *not* allow values over 2^31, i.e. you need to ensure that your
+    provided values (with units) when *written* to the file are safe to
+    be interpreted as (single-precision) floats. The only exception to
+    this is coordinates which are stored in double precision.
 
 Example
 ^^^^^^^
 
 .. code-block:: python
 
-    from swiftsimio import Writer
-    from swiftsimio.units import cosmo_units
-
-    import unyt
     import numpy as np
-
+    import unyt as u
+    from swiftsimio import Writer, cosmo_array
+    from swiftsimio.metadata.writer.unit_systems import cosmo_units
+
+    # number of gas particles
+    n_p = 1000
+    # scale factor of 1.0
+    a = 1.0
 
     # Box is 100 Mpc
-    boxsize = 100 * unyt.Mpc
-
-    # Generate object. cosmo_units corresponds to default Gadget-oid units
+    lbox = 100
+    boxsize = cosmo_array(
+        [lbox, lbox, lbox],
+        u.Mpc,
+        comoving=True,
+        scale_factor=a,
+        scale_exponent=1,
+    )
+
+    # Create the Writer object. cosmo_units corresponds to default Gadget-like units
     # of 10^10 Msun, Mpc, and km/s
-    x = Writer(cosmo_units, boxsize)
-
-    # 32^3 particles.
-    n_p = 32**3
-
-    # Randomly spaced coordinates from 0, 100 Mpc in each direction
-    x.gas.coordinates = np.random.rand(n_p, 3) * (100 * unyt.Mpc)
+    w = Writer(unit_system=cosmo_units, boxsize=boxsize, scale_factor=a)
+
+    # Randomly spaced coordinates from 0 to lbox Mpc in each direction
+    w.gas.coordinates = cosmo_array(
+        np.random.rand(n_p, 3) * lbox,
+        u.Mpc,
+        comoving=True,
+        scale_factor=w.scale_factor,
+        scale_exponent=1,
+    )
 
     # Random velocities from 0 to 1 km/s
-    x.gas.velocities = np.random.rand(n_p, 3) * (unyt.km / unyt.s)
+    w.gas.velocities = cosmo_array(
+        np.random.rand(n_p, 3),
+        u.km / u.s,
+        comoving=True,
+        scale_factor=w.scale_factor,
+        scale_exponent=1,
+    )
 
     # Generate uniform masses as 10^6 solar masses for each particle
-    x.gas.masses = np.ones(n_p, dtype=float) * (1e6 * unyt.msun)
+    w.gas.masses = cosmo_array(
+        np.ones(n_p, dtype=float) * 1e6,
+        u.solMass,
+        comoving=True,
+        scale_factor=w.scale_factor,
+        scale_exponent=0,
+    )
 
     # Generate internal energy corresponding to 10^4 K
-    x.gas.internal_energy = (
-        np.ones(n_p, dtype=float) * (1e4 * unyt.kb * unyt.K) / (1e6 * unyt.msun)
+    w.gas.internal_energy = cosmo_array(
+        np.ones(n_p, dtype=float) * 1e4 / 1e6,
+        u.kb * u.K / u.solMass,
+        comoving=True,
+        scale_factor=w.scale_factor,
+        scale_exponent=-2,
     )
 
-    # Generate initial guess for smoothing lengths based on MIPS
-    x.gas.generate_smoothing_lengths(boxsize=boxsize, dimension=3)
+    # Generate initial guess for smoothing lengths based on mean inter-particle spacing
+    w.gas.generate_smoothing_lengths()
+
+    # w.gas.particle_ids can optionally be set, otherwise they are auto-generated
 
-    # If IDs are not present, this automatically generates
-    x.write("test.hdf5")
+    # write the initial conditions out to a file
+    w.write("ics.hdf5")
 
-Then, running ``h5glance`` on the resulting ``test.hdf5`` produces:
+Then, running ``h5glance`` (``pip install h5glance``) on the resulting ``ics.hdf5``
+produces:
 
 ..
code-block:: bash - test.hdf5 - ├Header - │ └5 attributes: - │ ├BoxSize: 100.0 - │ ├Dimension: array [int64: 1] - │ ├Flag_Entropy_ICs: 0 - │ ├NumPart_Total: array [int64: 6] - │ └NumPart_Total_HighWord: array [int64: 6] + ics.hdf5 + ├Header (9 attributes) ├PartType0 - │ ├Coordinates [float64: 32768 × 3] - │ ├InternalEnergy [float64: 32768] - │ ├Masses [float64: 32768] - │ ├ParticleIDs [float64: 32768] - │ ├SmoothingLength [float64: 32768] - │ └Velocities [float64: 32768 × 3] - └Units - └5 attributes: - ├Unit current in cgs (U_I): array [float64: 1] - ├Unit length in cgs (U_L): array [float64: 1] - ├Unit mass in cgs (U_M): array [float64: 1] - ├Unit temperature in cgs (U_T): array [float64: 1] - └Unit time in cgs (U_t): array [float64: 1] - -**Note** you do need to be careful that your choice of unit system does -*not* allow values over 2^31, i.e. you need to ensure that your -provided values (with units) when *written* to the file are safe to -be interpreted as (single-precision) floats. The only exception to -this is coordinates which are stored in double precision. 
+ │ ├Coordinates [float64: 1000 × 3] (9 attributes) + │ ├InternalEnergy [float64: 1000] (9 attributes) + │ ├Masses [float64: 1000] (9 attributes) + │ ├ParticleIDs [int64: 1000] (9 attributes) + │ ├SmoothingLengths [float64: 1000] (9 attributes) + │ └Velocities [float64: 1000 × 3] (9 attributes) + └Units (5 attributes) diff --git a/swiftsimio/__init__.py b/swiftsimio/__init__.py index b4812536..ce8083b0 100644 --- a/swiftsimio/__init__.py +++ b/swiftsimio/__init__.py @@ -8,7 +8,7 @@ from pathlib import Path from .reader import SWIFTDataset -from .snapshot_writer import SWIFTSnapshotWriter +from .snapshot_writer import SWIFTSnapshotWriter as Writer from .masks import SWIFTMask from .statistics import SWIFTStatisticsFile from ._file_utils import open_path_or_handle @@ -28,13 +28,12 @@ import swiftsimio.objects as objects from swiftsimio.objects import cosmo_array, cosmo_quantity import swiftsimio.visualisation as visualisation -import swiftsimio.units as units import swiftsimio.subset_writer as subset_writer import swiftsimio.statistics as statistics __all__ = [ "SWIFTDataset", - "SWIFTSnapshotWriter", + "Writer", "SWIFTMask", "SWIFTStatisticsFile", "SWIFTUnits", @@ -50,7 +49,6 @@ "cosmo_array", "cosmo_quantity", "visualisation", - "units", "subset_writer", "statistics", "name", @@ -58,7 +56,6 @@ "mask", "load", "load_statistics", - "Writer", ] name = "swiftsimio" @@ -186,6 +183,3 @@ def load_statistics(filename: str | Path) -> SWIFTStatisticsFile: SWIFT statistics file path. """ return SWIFTStatisticsFile(filename=filename) - - -Writer = SWIFTSnapshotWriter diff --git a/swiftsimio/metadata/__init__.py b/swiftsimio/metadata/__init__.py index 37d7358a..1e3a9d58 100644 --- a/swiftsimio/metadata/__init__.py +++ b/swiftsimio/metadata/__init__.py @@ -4,18 +4,16 @@ Metadata includes a listing of the datasets available in a file. 
""" -from .particle import particle_types, particle_fields +from .particle import particle_types from .soap import soap_types -from .unit import unit_types, unit_fields +from .unit import unit_types from .metadata import metadata_fields from .writer import required_fields __all__ = [ "particle_types", - "particle_fields", "soap_types", "unit_types", - "unit_fields", "metadata_fields", "required_fields", ] diff --git a/swiftsimio/metadata/particle/__init__.py b/swiftsimio/metadata/particle/__init__.py index fa4ef064..fe886a3b 100644 --- a/swiftsimio/metadata/particle/__init__.py +++ b/swiftsimio/metadata/particle/__init__.py @@ -1,14 +1,5 @@ -"""Define names of particle types and fields available for each.""" +"""Define names of particle types.""" -from .particle_fields import ( - gas, - dark_matter, - boundary, - sinks, - stars, - black_holes, - neutrinos, -) from .particle_types import ( particle_name_underscores, particle_name_class, @@ -19,11 +10,4 @@ "particle_name_underscores", "particle_name_class", "particle_name_text", - "gas", - "dark_matter", - "boundary", - "sinks", - "stars", - "black_holes", - "neutrinos", ] diff --git a/swiftsimio/metadata/particle/particle_fields.py b/swiftsimio/metadata/particle/particle_fields.py deleted file mode 100644 index e5a00806..00000000 --- a/swiftsimio/metadata/particle/particle_fields.py +++ /dev/null @@ -1,62 +0,0 @@ -"""Define the particle fields for the various particle types.""" - -_shared = { - "Coordinates": "coordinates", - "Masses": "masses", - "ParticleIDs": "particle_ids", - "Velocities": "velocities", - "Potential": "potential", -} - -_baryon = { - "ElementAbundance": "element_abundance", - "Maximal Temperature": "maximal_temperature", - "Maximal Temperature scale-factor": "maximal_temperature_scale_factor", - "Maximal Temperature time": "maximal_temperature_time", - "IronMassFracFromSNIa": "iron_mass_frac_from_sn1a", - "MetalMassFracFromAGB": "metal_mass_frac_from_agb", - "MetalMassFracFromSNII": 
"metal_mass_frac_from_snii", - "MetalMassFracFromSNIa": "metal_mass_frac_from_sn1a", - "Metallicity": "metallicity", - "SmoothedElementAbundance": "smoothed_element_abundance", - "SmoothedIronMassFracFromSNIa": "smoothed_iron_mass_frac_from_sn1a", - "SmoothedMetallicity": "smoothed_metallicity", - "TotalMassFromAGB": "total_mass_from_agb", - "TotalMassFromSNII": "total_mass_from_snii", -} - -gas = { - "Density": "density", - "Entropy": "entropy", - "InternalEnergy": "internal_energy", - "SmoothingLength": "smoothing_length", - "Pressure": "pressure", - "SFR": "sfr", - "Temperature": "temperature", - "Viscosity": "viscosity", - "sSFR": "specific_sfr", - "MaterialID": "material_id", - "Diffusion": "diffusion", - "RadiatedEnergy": "radiated_energy", - **_shared, - **_baryon, -} - -dark_matter = {**_shared} - -boundary = {**_shared} - -sinks = {**_shared} - -stars = { - "BirthDensity": "birth_density", - "BirthTime": "birth_time", - "InitialMasses": "initial_masses", - "SmoothingLength": "smoothing_length", - **_shared, - **_baryon, -} - -black_holes = {**_shared} - -neutrinos = {**_shared} diff --git a/swiftsimio/metadata/unit/__init__.py b/swiftsimio/metadata/unit/__init__.py index e8c678db..297ba261 100644 --- a/swiftsimio/metadata/unit/__init__.py +++ b/swiftsimio/metadata/unit/__init__.py @@ -5,13 +5,9 @@ possible_base_units, find_nearest_base_unit, ) -from .unit_fields import a_exponents, generate_units, generate_dimensions __all__ = [ - "a_exponents", "unit_names_to_unyt", "possible_base_units", "find_nearest_base_unit", - "generate_units", - "generate_dimensions", ] diff --git a/swiftsimio/metadata/unit/unit_fields.py b/swiftsimio/metadata/unit/unit_fields.py deleted file mode 100644 index df0dfca9..00000000 --- a/swiftsimio/metadata/unit/unit_fields.py +++ /dev/null @@ -1,122 +0,0 @@ -""" -Define units "by hand" for writing snapshot-like files. - -Contains the information for the units that determine the particle fields. 
-When reading files we read this metadata from the files, but when writing -we may need to generate this information. -""" - -from unyt import g, cm, s, statA, K, Unit -from typing import Callable - -# scale factor exponents for writing snapshot-like files: -a_exponents = {"coordinates": 1, "internal_energies": -2} - - -def generate_units( - mass: Unit, length: Unit, time: Unit, current: Unit, temperature: Unit -) -> dict[str, dict[str, Unit]]: - """ - Generate unit dictionaries. - - Units for: - - mass, length, time, current, and temperature - - Parameters - ---------- - mass : Unit - The mass unit. - - length : Unit - The length unit. - - time : Unit - The time unit. - - current : Unit - The current unit. - - temperature : Unit - The temperature unit. - - Returns - ------- - dict[str, dict[str, Unit]] - Dictionary with a dictonary for each particle type defining the units for each - field. - """ - shared = { - "coordinates": length, - "masses": mass, - "particle_ids": None, - "velocities": length / time, - "potential": length * length / (time * time), - } - - gas = { - "internal_energy": (length / time) ** 2, - "smoothing_length": length, - "pressure": mass / (length * time**2), - "temperature": temperature, - **shared, - } - - dark_matter = {**shared} - - boundary = {**shared} - - sinks = {**shared} - - stars = { - "smoothing_length": length, - **shared, - } - - black_holes = {**shared} - - neutrinos = {**shared} - - return { - "gas": gas, - "dark_matter": dark_matter, - "boundary": boundary, - "sinks": sinks, - "stars": stars, - "black_holes": black_holes, - "neutrinos": neutrinos, - } - - -def generate_dimensions( - generate_unit_func: Callable[..., dict] = generate_units, -) -> dict: - """ - Get the dimensions for the above. - - Parameters - ---------- - generate_unit_func : Callable[..., dict] - Function to set units for particle fields. - - Returns - ------- - dict - Dictionary specifying dimensions. 
- - """ - units = generate_unit_func(g, cm, s, statA, K) - - dimensions = {} - - for particle_type, particle_type_units in units.items(): - dimensions[particle_type] = {} - - for unit_name, unit in particle_type_units.items(): - try: - dimensions[particle_type][unit_name] = unit.dimensions - except AttributeError: - # Units that have "none" dimensions - dimensions[particle_type][unit_name] = 1 - - return dimensions diff --git a/swiftsimio/metadata/writer/required_fields.py b/swiftsimio/metadata/writer/required_fields.py index a270b1fd..18a83772 100644 --- a/swiftsimio/metadata/writer/required_fields.py +++ b/swiftsimio/metadata/writer/required_fields.py @@ -1,15 +1,20 @@ """A list of required fields for writing a SWIFT dataset, for each particle type.""" +import unyt.dimensions as dim + _shared = { - "coordinates": "Coordinates", - "particle_ids": "ParticleIDs", - "velocities": "Velocities", - "masses": "Masses", + "coordinates": {"handle": "Coordinates", "dimensions": dim.length}, + "particle_ids": {"handle": "ParticleIDs", "dimensions": dim.dimensionless}, + "velocities": {"handle": "Velocities", "dimensions": dim.velocity}, + "masses": {"handle": "Masses", "dimensions": dim.mass}, } gas = { - "smoothing_length": "SmoothingLength", - "internal_energy": "InternalEnergy", + "smoothing_lengths": {"handle": "SmoothingLengths", "dimensions": dim.length}, + "internal_energy": { + "handle": "InternalEnergy", + "dimensions": dim.length**2 / dim.time**2, + }, **_shared, } @@ -19,7 +24,10 @@ sinks = {**_shared} -stars = {**_shared, "smoothing_length": "SmoothingLength"} +stars = { + **_shared, + "smoothing_lengths": {"handle": "SmoothingLengths", "dimensions": dim.length}, +} black_holes = {**_shared} diff --git a/swiftsimio/metadata/writer/unit_systems.py b/swiftsimio/metadata/writer/unit_systems.py new file mode 100644 index 00000000..86114825 --- /dev/null +++ b/swiftsimio/metadata/writer/unit_systems.py @@ -0,0 +1,21 @@ +""" +Define unit systems that may be useful to 
astronomers. + +In particular, we define ``cosmo_units`` that are Gadget-like default units: + ++ Unit length = Mpc ++ Unit velocity = km/s ++ Unit mass = 10^10 Msun ++ Unit temperature = K + +Also contains unit conversion factors, to simplify units wherever possible. +""" + +import unyt + +cosmo_units = unyt.UnitSystem( + "cosmological", + unyt.Mpc, + unyt.unyt_quantity(1e10, units=unyt.solMass), + unyt.unyt_quantity(1.0, units=unyt.s * unyt.Mpc / unyt.km).to(unyt.Gyr), +) diff --git a/swiftsimio/optional_packages.py b/swiftsimio/optional_packages.py index 36f029e4..1f410ddc 100644 --- a/swiftsimio/optional_packages.py +++ b/swiftsimio/optional_packages.py @@ -169,6 +169,7 @@ def cuda_jit(*args, **kwargs): # NOQA # hdfstream try: import hdfstream + HDFSTREAM_AVAILABLE = True except (ImportError, ModuleNotFoundError): hdfstream = None diff --git a/swiftsimio/snapshot_writer.py b/swiftsimio/snapshot_writer.py index cc9ffaf4..0ddebb0f 100644 --- a/swiftsimio/snapshot_writer.py +++ b/swiftsimio/snapshot_writer.py @@ -1,21 +1,22 @@ -"""Contains functions and objects for creating SWIFT datasets.""" +"""Provide utilities to create SWIFT files usable as initial conditions.""" import unyt import h5py import numpy as np +from warnings import warn from typing import Callable from sympy import Symbol from functools import reduce from swiftsimio import metadata +from swiftsimio.metadata.writer.unit_systems import cosmo_units from swiftsimio.objects import cosmo_array -from swiftsimio.metadata.unit.unit_fields import a_exponents def _ptype_str_to_int(ptype_str: str) -> int: """ - Convert a string like `"PartType0"` to an integer (in this example, `0`). + Convert a string like ``"PartType0"`` to an integer (in this example, ``0``). Parameters ---------- @@ -34,23 +35,33 @@ class __SWIFTWriterParticleDataset(object): """ A particle dataset for _writing_ with. - This is explicitly different to the one used for reading, as it requires a very - different feature set. 
Perhaps one day they will be merged, but for now this keeps the - code used to manage both simple. + This is explicitly different to the one used for reading + (:class:`~swiftsimio.reader.__SWIFTGroupDataset`), as it requires a very different + feature set. Parameters ---------- + writer : ~swiftsimio.snapshot_writer.SWIFTSnapshotWriter + A reference to the writer containing this particle dataset writer. + unit_system : unyt.UnitSystem or str Either be a string (e.g. "cgs"), or a UnitSystem as defined by unyt - specifying the units to be used. Users may wish to consider the - cosmological unit system provided in swiftsimio.units.cosmo_units. + specifying the units to be used. Users may wish to use the cosmological unit + system provided as + ``from swiftsimio.metadata.writer.unit_systems import cosmo_units``. particle_type : int - The particle type of the dataset. Numbering convention is the same as - SWIFT, with 0 corresponding to gas, etc. as usual. + The particle type of the dataset. Numbering convention is the same as SWIFT, with + ``0`` corresponding to gas, etc., as usual. """ - def __init__(self, unit_system: unyt.UnitSystem | str, particle_type: int) -> None: + def __init__( + self, + writer: "SWIFTSnapshotWriter", + unit_system: unyt.UnitSystem | str, + particle_type: int, + ) -> None: + self.writer = writer self.unit_system = unit_system self.particle_type = particle_type @@ -66,8 +77,8 @@ def generate_empty_properties(self) -> None: """ Generate the empty properties accessed by setters and getters. - Initially all of the _{name} values are set to None. Note that we - only generate required properties. + Initially all of the ``_{name}`` values are set to ``None``. Note that we only + generate required properties. """ for name in getattr(metadata.required_fields, self.particle_name).keys(): setattr(self, f"_{name}", None) @@ -81,7 +92,7 @@ def check_empty(self) -> bool: Returns ------- bool - True if all datasets are empty. 
+ ``True`` if all datasets are empty. """ for name in getattr(metadata.required_fields, self.particle_name).keys(): if getattr(self, f"_{name}") is not None: @@ -95,13 +106,13 @@ def check_consistent(self) -> bool: Checks the following: - + That all required fields (apart from particle_ids) are not None, - + That all required fields have the same length + + that all required fields (apart from ``particle_ids``) are not ``None``, + + that all required fields have the same length. If those are true, - + self.n_part is set with the total number of particles of this type - + self.requires_particle_ids_before_write is set to a boolean. + + ``self.n_part`` is set with the total number of particles of this type + + ``self.requires_particle_ids_before_write`` is set to a boolean. Returns ------- @@ -136,32 +147,33 @@ def check_consistent(self) -> bool: return True - def generate_smoothing_lengths(self, boxsize: cosmo_array, dimension: int) -> None: + def generate_smoothing_lengths(self) -> None: """ - Automatically generate smoothing lengths as 2 * the mean interparticle spacing. + Generate smoothing lengths as 2 times the mean interparticle spacing. - This only works for a uniform boxsize (i.e. one that has the same length in all - dimensions). If boxsize is a list, we just use the 0th member. - - Parameters - ---------- - boxsize : cosmo_array or cosmo_quantity - Size of SWIFT computational box. - - dimension : int - Number of box dimensions. + This only works for a boxsize that has the same length in all dimensions. """ - try: - boxsize = boxsize[0] - except IndexError: - boxsize = boxsize + if "smoothing_lengths" not in getattr( + metadata.required_fields, self.particle_name + ): + raise RuntimeError( + "Cannot generate smoothing lengths for particle types that don't require " + "them." + ) + + if not (np.diff(self.writer.boxsize) == 0).all(): + raise ValueError( + "To generate smoothing lengths box side lengths must all be equal." 
+ ) + dimension = self.writer.dimension + boxlength = self.writer.boxsize[0] n_part = self.coordinates.shape[0] - mips = boxsize / (n_part ** (1.0 / dimension)) + mips = boxlength / (n_part ** (1.0 / dimension)) - smoothing_lengths = mips * np.ones(n_part, dtype=float) + smoothing_lengths = 2 * mips * np.ones(n_part, dtype=float) - self.smoothing_length = smoothing_lengths + self.smoothing_lengths = smoothing_lengths return @@ -184,9 +196,10 @@ def write_particle_group(self, file_handle: h5py.File, compress: bool) -> None: else: compression = None - for name, output_handle in getattr( + for name, required_field_info in getattr( metadata.required_fields, self.particle_name ).items(): + output_handle = required_field_info["handle"] particle_group.create_dataset( output_handle, data=getattr(self, name), compression=compression ) @@ -202,64 +215,65 @@ def write_particle_group_metadata( Parameters ---------- file_handle : h5py.File - HDF5 output file being written. + HDF5 output file to write to. dset_attributes : dict Dictionary containg metadata to attach to group. """ - for name, output_handle in getattr( + for name, required_field_info in getattr( metadata.required_fields, self.particle_name ).items(): + output_handle = required_field_info["handle"] obj = file_handle[f"/{self.particle_type}/{output_handle}"] for attr_name, attr_value in dset_attributes[output_handle].items(): obj.attrs.create(attr_name, attr_value) return - def get_attributes(self, scale_factor: float) -> dict: + def get_attributes(self) -> dict: """ - Return a dictionary containg the attributes to attach to the dataset. - - Parameters - ---------- - scale_factor : float - The cosmological scale factor of the dataset. + Return a dictionary containing the attributes to attach to the dataset. Returns ------- dict Dictionary containg the attributes applying to the dataset. 
""" + # annoyingly unyt calls "current" "current_mks", but not a big deal + from unyt.dimensions import mass, length, temperature, time, current_mks + attributes_dict = {} - for name, output_handle in getattr( + for name, required_field_info in getattr( metadata.required_fields, self.particle_name ).items(): + output_handle = required_field_info["handle"] field = getattr(self, name) + if not isinstance(field, cosmo_array): + raise ValueError( + f"Provide {name} data to swiftsimio.Writer as" + " swiftsimio.cosmo_array's (i.e. including both unit and cosmology" + " information)." + ) # Find the exponents for each of the dimensions dim_exponents = get_dimensions(field.units.dimensions) - - # Find the scale factor associated quantities - a_exp = a_exponents.get(name, 0) - a_factor = scale_factor**a_exp - attributes_dict[output_handle] = { "Conversion factor to CGS (not including cosmological corrections)": [ field.unit_quantity.in_cgs() ], "Conversion factor to physical CGS " "(including cosmological corrections)": [ - field.unit_quantity.in_cgs() * a_factor + field.unit_quantity.in_cgs() * field.cosmo_factor.a_factor + ], + "U_I exponent": [dim_exponents[current_mks]], + "U_L exponent": [dim_exponents[length]], + "U_M exponent": [dim_exponents[mass]], + "U_T exponent": [dim_exponents[temperature]], + "U_t exponent": [dim_exponents[time]], + "a-scale exponent": [ + float(getattr(field.cosmo_factor.expr, "exp", 1.0)) ], - # Assign the exponents in the proper order - # (see unyt.dimensions.base_dimensions) - "U_I exponent": [dim_exponents["(current)"]], - "U_L exponent": [dim_exponents["(length)"]], - "U_M exponent": [dim_exponents["(mass)"]], - "U_T exponent": [dim_exponents["(temperature)"]], - "U_t exponent": [dim_exponents["(time)"]], - "a-scale exponent": [a_exp], "h-scale exponent": [0.0], } @@ -268,7 +282,7 @@ def get_attributes(self, scale_factor: float) -> dict: def get_dimensions(dimension: unyt.dimensions) -> dict: """ - Return exponents corresponding to base 
dimensions for given unyt dimensions object. + Return exponents corresponding to base dimensions for given :mod:`unyt.dimensions`. Parameters ---------- @@ -277,52 +291,34 @@ def get_dimensions(dimension: unyt.dimensions) -> dict: Returns ------- - np.ndarray - Array of exponents corresponding to each base dimension. + dict + Dictionary of exponents corresponding to each base dimension. Examples -------- .. code-block:: python >>> get_dimensions(unyt.dimensions.velocity) - { - "(mass)": 0, - "(length)": 1, - "(time)": -1, - "(temperature)": 0, - "(current)": 0, - } + {(mass): 0.0, (length): 1.0, (time): -1.0, (temperature): 0.0, (angle): 0.0, + (current_mks): 0.0, 1: 0.0, (luminous_intensity): 0.0, (logarithmic): 0.0} """ - # Get the names of all the dimensions - dimensions = [str(x) for x in unyt.dimensions.base_dimensions[:5]] - # Annoyingly it's current_mks instead of current in unyt, so change that - dimensions[4] = "(current)" - n_dims = len(dimensions) + # create the return dict defaulting to exponent of 0 + exp_dict = {dimension: 0.0 for dimension in unyt.dimensions.base_dimensions} - # create the return array - exp_array = {} - - # extract the base and exponent for each of the units - dim_array = [x.as_base_exp() for x in dimension.as_ordered_factors()] - - # Find out which dimensions and exponents are present in the base units - for i in range(n_dims): - present = False - for dim in dim_array: - if dimensions[i] in str(dim[0]): - present = True - exp_array[str(dim[0])] = float(dim[1]) - if not present: - exp_array[dimensions[i]] = 0.0 + # extract the exponent for each of the dimensions + exp_dict.update( + (k, float(v)) + for k, v in (d.as_base_exp() for d in dimension.as_ordered_factors()) + ) - return exp_array + return exp_dict def generate_getter( name: str, -) -> Callable[[__SWIFTWriterParticleDataset], unyt.unyt_array]: +) -> Callable[[__SWIFTWriterParticleDataset], cosmo_array]: """ - Generate a function that gets the unyt array for name. 
+ Generate a getter for the :class:`~swiftsimio.objects.cosmo_array` for ``name``. Parameters ---------- @@ -332,10 +328,11 @@ def generate_getter( Returns ------- Callable - Getter function that returns unyt array for ``name``. + Getter function that returns :class:`~swiftsimio.objects.cosmo_array` for + ``name``. """ - def getter(self: __SWIFTWriterParticleDataset) -> unyt.unyt_array: + def getter(self: __SWIFTWriterParticleDataset) -> cosmo_array: """ Get the value of the dataset from the private name attribute. @@ -346,7 +343,7 @@ def getter(self: __SWIFTWriterParticleDataset) -> unyt.unyt_array: Returns ------- - unyt_array + cosmo_array The value of the named data field. """ return getattr(self, f"_{name}") @@ -356,9 +353,9 @@ def getter(self: __SWIFTWriterParticleDataset) -> unyt.unyt_array: def generate_setter( name: str, dimensions: unyt.dimensions, unit_system: unyt.UnitSystem | str -) -> Callable[[__SWIFTWriterParticleDataset, unyt.unyt_array], None]: +) -> Callable[[__SWIFTWriterParticleDataset, cosmo_array], None]: """ - Generate a function that sets self._name to the value that is passed to it. + Generate a function that sets ``self._`` to the value that is passed to it. Parameters ---------- @@ -377,32 +374,62 @@ def generate_setter( Function to set ``self._name``. """ - def setter(self: __SWIFTWriterParticleDataset, value: unyt.unyt_array) -> None: + def setter(self: __SWIFTWriterParticleDataset, value: cosmo_array) -> None: """ - Set the named dataset to a value (private name attribute). + Set the named dataset (private attribute) to a value. + + Makes consistency checks on the input: + + + the given units have the expected dimensions for this field + + that values are unique and >=1 for ``particle_ids`` + + that the scale factor attached to :class:`~swiftsimio.objects.cosmo_array` + inputs matches the one for the top-level output metadata. Parameters ---------- self : __SWIFTWriterParticleDataset - The dataset the attribute is attached to. 
+ The dataset that the attribute is attached to. - value : unyt_array + value : cosmo_array The value to set the attribute to. """ + if hasattr(value, "cosmo_factor"): + assert value.cosmo_factor.scale_factor == self.writer.scale_factor, ( + f"The scale factor of {name} ({value.cosmo_factor.scale_factor}) does not" + f" match the scale factor of the Writer ({self.writer.scale_factor})." + ) if dimensions != 1: - if isinstance(value, unyt.unyt_array): + if isinstance(value, cosmo_array): if value.units.dimensions == dimensions: value.convert_to_base(unit_system) - + value.convert_to_comoving() setattr(self, f"_{name}", value) else: raise unyt.exceptions.InvalidUnitEquivalence( f"Convert to {name}", value.units.dimensions, dimensions ) else: - raise TypeError("You must provide quantities as unyt arrays.") + raise TypeError(f"Provide {name} as swiftsimio.cosmo_array.") else: - setattr(self, f"_{name}", unyt.unyt_array(value, None)) + if name == "particle_ids": + if any(value <= 0): + raise ValueError(f"{self.particle_name}.particle_ids must be >= 1.") + if np.unique(value).size != value.size: + raise ValueError( + f"{self.particle_name}.particle_ids must not have repeated IDs." + ) + + setattr( + self, + f"_{name}", + cosmo_array( + value, + unyt.dimensionless, + comoving=True, + scale_factor=self.writer.scale_factor, + scale_exponent=0.0, + ), + ) return @@ -411,7 +438,7 @@ def setter(self: __SWIFTWriterParticleDataset, value: unyt.unyt_array) -> None: def generate_deleter(name: str) -> Callable[[__SWIFTWriterParticleDataset], None]: """ - Generate a function that destroys self._name (sets it back to None). + Generate a function that destroys ``self._`` (sets it back to ``None``). Parameters ---------- @@ -421,12 +448,12 @@ def generate_deleter(name: str) -> Callable[[__SWIFTWriterParticleDataset], None Returns ------- Callable - Function to delete ``self._name``. + Function to delete ``self._``. 
""" def deleter(self: __SWIFTWriterParticleDataset) -> None: """ - Delete the named dataset (private name attribute). + Delete the named (private attribute) dataset. Parameters ---------- @@ -443,37 +470,33 @@ def deleter(self: __SWIFTWriterParticleDataset) -> None: def generate_dataset( + writer: "SWIFTSnapshotWriter", unit_system: unyt.UnitSystem | str, particle_type: int, - unit_fields_generate_units: Callable[ - ..., dict - ] = metadata.unit_fields.generate_units, ) -> __SWIFTWriterParticleDataset: """ - Generate a SWIFTWriterParticleDataset _class_ for the given particle type. + Generate a :class:`~swiftsimio.snapshot_writer.SWIFTWriterParticleDataset`. - We _must_ do the following _outside_ of the class itself, as one - can assign properties to a _class_ but not _within_ a class - dynamically. + We _must_ do the following _outside_ of the class itself, as one can assign properties + to a _class_ but not _within_ a class dynamically. - Here we loop through all of the possible properties in the metadata file. - We then use the builtin property() function and some generators to - create setters and getters for those properties. This will allow them - to be accessed from outside by using SWIFTWriterParticleDataset.name, where - the name is, for example, coordinates. + Here we loop through all of the possible properties in the metadata file. We then use + the builtin property() function and some generators to create setters and getters for + those properties. This will allow them to be accessed from outside by using + ``SWIFTWriterParticleDataset(...).``, where the name is, for example, + ``coordinates``. Parameters ---------- + writer : ~swiftsimio.snapshot_writer.SWIFTSnapshotWriter + A reference to the writer that will contain the generated particle dataset writer. + unit_system : unyt.UnitSystem or str Unit system of the dataset. particle_type : int The particle type of the dataset. Numbering convention is the same as - SWIFT, with 0 corresponding to gas, etc. 
as usual. - - unit_fields_generate_units : Callable, optional - Collection of properties in metadata file for which to create setters - and getters. + SWIFT, with ``0`` corresponding to gas, etc. as usual. Returns ------- @@ -486,13 +509,13 @@ def generate_dataset( this_dataset_bases = (__SWIFTWriterParticleDataset, object) this_dataset_dict = {} - # Get the unit dimensions - dimensions = metadata.unit_fields.generate_dimensions(unit_fields_generate_units) - - for name in getattr(metadata.required_fields, particle_name).keys(): + for name, required_field_info in getattr( + metadata.required_fields, particle_name + ).items(): + dimensions = required_field_info["dimensions"] this_dataset_dict[name] = property( generate_getter(name), - generate_setter(name, dimensions[particle_name][name], unit_system), + generate_setter(name, dimensions, unit_system), generate_deleter(name), ) @@ -500,7 +523,7 @@ def generate_dataset( f"{particle_nice_name}WriterDataset", this_dataset_bases, this_dataset_dict ) - empty_dataset = ThisDataset(unit_system, particle_type) + empty_dataset = ThisDataset(writer, unit_system, particle_type) return empty_dataset @@ -509,12 +532,36 @@ class SWIFTSnapshotWriter(object): """ The SWIFT dataset writer. - This is used to store all particle arrays and do some extra processing before writing - a HDF5 file containing: + This is used to store all particle arrays and do some extra consistency checks and + processing before writing a HDF5 file containing: + Fully consistent unit system + All required arrays for SWIFT to start - + Required metadata (all automatic, apart from those required by __init__) + + Required metadata (all automatic, apart from those required by ``__init__``) + + The written output can be read back in using :mod:`swiftsimio`, or used as initial + conditions for a SWIFT simulation. + + Any of the usual datasets (``gas``, ``dark_matter``, ``stars``, ``black_holes``, + ``sinks``, ``neutrinos``, ``boundary``) can be populated with data. 
If a dataset is + populated, it must have all ``required_fields`` filled in. The required fields can be + viewed, for example, with ``swiftsimio.metadata.writer.required_fields.gas.keys()``. + The only exception is ``particle_ids``, that will be generated automatically if + missing. Smoothing length estimates for uniform-density gas in a periodic box can + also be auto-generated, but this has to be done explicitly (see example below). + + To fill in a required dataset, provide the data with a + :class:`~swiftsimio.objects.cosmo_array`. For example: + + .. code-block:: python + + w.gas.masses = cosmo_array( + np.ones(n_p, dtype=float) * 1e6, + u.solMass, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=0, + ) Parameters ---------- @@ -525,49 +572,118 @@ class SWIFTSnapshotWriter(object): Size of simulation box and associated units. dimension : int, optional - Dimensions of simulation. + Dimensions (length of coordinate vector) of simulation. compress : bool, optional - Flag to turn on compression. + Flag to turn on gzip compression. extra_header : dict, optional - Dictionary containing extra things to write to the header. - - unit_fields_generate_units : Callable, optional - Collection of properties in metadata file for which to create setters - and getters. + Dictionary containing extra fields to write to the HDF5 ``Header`` group. scale_factor : np.float32 - Scale factor associated with dataset. Defaults to 1. + Scale factor associated with dataset. + + Examples + -------- + An example showing populating all required fields for the ``gas`` dataset: + + .. 
code-block:: python + + import numpy as np + import unyt as u + from swiftsimio import Writer, cosmo_array + from swiftsimio.metadata.writer.unit_systems import cosmo_unit_system + + # number of gas particles + n_p = 1000 + # scale factor of 1.0 + a = 1.0 + # Box is 100 Mpc + lbox = 100 + boxsize = cosmo_array( + [lbox, lbox, lbox], + u.Mpc, + comoving=True, + scale_factor=a, + scale_exponent=1, + ) + + # Create the Writer object. cosmo_unit_system corresponds to default Gadget-like + # units of 10^10 Msun, Mpc, and km/s + w = Writer(unit_system=cosmo_unit_system, boxsize=boxsize, scale_factor=a) + + # Randomly spaced coordinates from 0 to lbox Mpc in each direction + w.gas.coordinates = cosmo_array( + np.random.rand(n_p, 3) * lbox, + u.Mpc, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=1, + ) + + # Random velocities from 0 to 1 km/s + w.gas.velocities = cosmo_array( + np.random.rand(n_p, 3), + u.km / u.s, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=1, + ) + + # Generate uniform masses as 10^6 solar masses for each particle + w.gas.masses = cosmo_array( + np.ones(n_p, dtype=float) * 1e6, + u.solMass, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=0, + ) + + # Generate internal energy corresponding to 10^4 K + w.gas.internal_energy = cosmo_array( + np.ones(n_p, dtype=float) * 1e4 / 1e6, + u.kb * u.K / u.solMass, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=-2, + ) + + # Generate initial guess for smoothing lengths based on mean inter-particle spacing + w.gas.generate_smoothing_lengths() + + # w.gas.particle_ids can optionally be set, otherwise they are auto-generated + + # write the initial conditions out to a file + w.write("ics.hdf5") """ def __init__( self, - unit_system: unyt.UnitSystem | str, + *, + unit_system: unyt.UnitSystem | str = cosmo_units, boxsize: cosmo_array, dimension: int = 3, compress: bool = True, extra_header: dict | None = None, - unit_fields_generate_units: Callable[ 
- ..., dict - ] = metadata.unit_fields.generate_units, scale_factor: np.float32 = 1.0, ) -> None: - self.unit_fields_generate_units = unit_fields_generate_units if isinstance(unit_system, str): self.unit_system = unyt.unit_systems.unit_system_registry[unit_system] else: self.unit_system = unit_system - # Validate the boxsize and convert to our units. try: - for x in boxsize: - x.convert_to_base(self.unit_system) - self.boxsize = boxsize - except TypeError: - # This is just a single number (i.e. uniform in all dimensions) - boxsize.convert_to_base(self.unit_system) - self.boxsize = boxsize + if len(boxsize) != dimension: + raise ValueError( + f"boxsize must have length equal to number of dimensions, {boxsize=}," + f" {dimension=}." + ) + except TypeError: # len() of unsized object + raise ValueError( + "boxsize must be a cosmo_array of length equal to spatial dimensions." + ) + self.boxsize = np.copy(boxsize, subok=True) + self.boxsize.convert_to_base(self.unit_system) self.dimension = dimension self.compress = compress @@ -586,34 +702,41 @@ def create_particle_datasets(self) -> None: setattr( self, name, - generate_dataset( - self.unit_system, number, self.unit_fields_generate_units - ), + generate_dataset(self, self.unit_system, number), ) return def _generate_ids(self, names_to_write: list) -> None: """ - (Re-)generate all particle IDs for groups with names in names_to_write. + (Re-)generate all particle IDs for groups with names in ``names_to_write``. Parameters ---------- names_to_write : list - List of groups to regenerate ids for. + List of groups to (re-)generate ids for. """ - numbers_of_particles = [getattr(self, name).n_part for name in names_to_write] # Start particle ID's at 1. When running with hydro + DM, partID = 0 # is a no-no because the ID's are used as offsets in arrays. The code # will most likely crash at some point, and "part_verify_links()" will # complain about it if SWIFT is run with debugging checks on. 
already_used = 1 - for number, name in zip(numbers_of_particles, names_to_write): - getattr(self, name).particle_ids = np.arange( - already_used, number + already_used + for name in names_to_write: + if getattr(self, name)._particle_ids is not None: + warn( + f"Overwriting {name}.particle_ids, to prevent this provide particle " + "IDs for all particle types, or none.", + RuntimeWarning, + ) + getattr(self, name).particle_ids = cosmo_array( + np.arange(already_used, getattr(self, name).n_part + already_used), + unyt.dimensionless, + comoving=True, + scale_factor=self.scale_factor, + scale_exponent=0.0, ) - already_used += number + already_used += getattr(self, name).n_part return @@ -656,6 +779,7 @@ def _write_metadata(self, handle: h5py.File, names_to_write: list) -> None: "NumPart_Total_HighWord": [0] * 6, "Flag_Entropy_ICs": 0, "Dimension": np.array([self.dimension]), + "Scale-factor": [self.scale_factor], # LEGACY but required for Gadget readers "NumFilesPerSnapshot": 1, "NumPart_ThisFile": number_of_particles, @@ -676,7 +800,9 @@ def _write_units(self, handle: h5py.File) -> None: """ Write the unit information to file. - Note that we do not have support for unit current yet. + .. note:: + + We do not have support for units of current yet. Parameters ---------- @@ -750,8 +876,15 @@ def write(self, filename: str) -> None: if generate_ids: self._generate_ids(names_to_write) + else: + all_ids = np.concatenate( + [getattr(self, name).particle_ids for name in names_to_write] + ) + if np.unique(all_ids).size != all_ids.size: + raise ValueError( + "Particle IDs must be unique across all particle types." 
+ ) - # Now we do the hard part with h5py.File(filename, "w") as handle: self._write_metadata(handle, names_to_write) @@ -759,7 +892,7 @@ def write(self, filename: str) -> None: for name in names_to_write: getattr(self, name).write_particle_group(handle, compress=self.compress) - attrs = getattr(self, name).get_attributes(self.scale_factor) + attrs = getattr(self, name).get_attributes() getattr(self, name).write_particle_group_metadata(handle, attrs) return diff --git a/swiftsimio/units.py b/swiftsimio/units.py deleted file mode 100644 index de129721..00000000 --- a/swiftsimio/units.py +++ /dev/null @@ -1,29 +0,0 @@ -""" -Define unit systems that may be useful to astronomers. - -In particular, we define the cosmo_units which can be considered Gadget-oid default units: - -+ Unit length = Mpc -+ Unit velocity = km/s -+ Unit mass = 10^10 Msun -+ Unit temperature = K - -Also contains unit conversion factors, to simplify units wherever possible. -""" - -import unyt - - -try: - # Need to do this first otherwise the `unyt` system freaks out about - # us upgrading msun from a symbol - cosmo_units = unyt.UnitSystem( - "cosmological", - unyt.Mpc, - unyt.unyt_quantity(1e10, units=unyt.Solar_Mass), - unyt.unyt_quantity(1.0, units=unyt.s * unyt.Mpc / unyt.km).to(unyt.Gyr), - ) -except RuntimeError: - # We've already done that, oops. 
- cosmo_units = unyt.unit_systems.cosmological - pass diff --git a/tests/conftest.py b/tests/conftest.py index b625bd14..8e1d89f2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,8 +8,10 @@ import numpy as np import unyt import h5py -from swiftsimio import Writer -from swiftsimio.units import cosmo_units +from swiftsimio import Writer, cosmo_array +import swiftsimio.metadata.particle as particle_metadata +import swiftsimio.metadata.writer.required_fields as writer_required_fields +from .helper import create_minimal_writer, create_two_type_writer from swiftsimio.optional_packages import HDFSTREAM_AVAILABLE, hdfstream # URL to download the test data @@ -216,7 +218,7 @@ def cosmological_volume(request: pytest.FixtureRequest) -> Generator[str, None, Yields ------ - out : Generator[str, None, None] + Generator[str, None, None] The file name, after downloading if required. """ filename, access_method = request.param @@ -255,7 +257,7 @@ def cosmological_volume_only_single_local() -> Generator[str, None, None]: Yields ------ - out : Generator[str, None, None] + Generator[str, None, None] The file name, after downloading if required. """ yield _requires("EagleSingle.hdf5") @@ -275,7 +277,7 @@ def cosmological_volume_only_distributed( Yields ------ - out : Generator[str, None, None] + Generator[str, None, None] The file name, after downloading if required. """ yield request.param("EagleDistributed.hdf5", request) @@ -299,7 +301,7 @@ def cosmological_volume_dithered( Yields ------ - out : Generator[str, None, None] + Generator[str, None, None] The file name, after downloading if required. """ yield request.param("LegacyCosmologicalVolumeDithered.hdf5", request) @@ -317,7 +319,7 @@ def soap_example(request: pytest.FixtureRequest) -> Generator[str, None, None]: Yields ------ - out : Generator[str, None, None] + Generator[str, None, None] The file name, after downloading if required. 
""" yield request.param("SoapExample.hdf5", request) @@ -345,57 +347,150 @@ def snapshot_or_soap(request: pytest.FixtureRequest) -> Generator[str, None, Non Yields ------ - out : Generator[str, None, None] + Generator[str, None, None] The file name, after downloading if required. """ filename, access_method = request.param yield access_method(filename, request) +@pytest.fixture(scope="function") +def simple_writer() -> Generator[Writer, None, None]: + """ + Provide a :class:`~swiftsimio.snapshot_writer.Writer` with required gas fields. + + Yields + ------ + Generator[Writer, None, None] + The Writer object. + """ + yield create_minimal_writer() + + +@pytest.fixture(scope="function") +def two_type_writer() -> Generator[Writer, None, None]: + """ + Provide a :class:`~swiftsimio.snapshot_writer.Writer` with required gas & DM fields. + + Yields + ------ + Generator[Writer, None, None] + The Writer object. + """ + yield create_two_type_writer() + + @pytest.fixture(scope="function") def simple_snapshot_data() -> Generator[tuple[Writer, str], None, None]: """ - Fixture provides a simple IC-like snapshot for testing. + Provide a simple IC-like snapshot for testing. Yields ------ - out : Generator[tuple[Writer, str], None, None] + Generator[tuple[Writer, str], None, None] The Writer object and the name of the file it wrote. """ test_filename = "test_write_output_units.hdf5" + w = create_minimal_writer() - # Box is 100 Mpc - boxsize = 100 * unyt.Mpc + w.write(test_filename) - # Generate object. cosmo_units corresponds to default Gadget-oid units - # of 10^10 Msun, Mpc, and km/s - x = Writer(cosmo_units, boxsize) + yield w, test_filename + + os.remove(test_filename) - # 32^3 particles. - n_p = 32**3 - # Randomly spaced coordinates from 0, 100 Mpc in each direction - x.gas.coordinates = np.random.rand(n_p, 3) * (100 * unyt.Mpc) +def _setup_extra_part_type(): + """ + Set up metadata for an extra particle type. 
- # Random velocities from 0 to 1 km/s - x.gas.velocities = np.random.rand(n_p, 3) * (unyt.km / unyt.s) + Ideally this tinkering with global variables should be refactored out. + """ + particle_metadata.particle_name_underscores["PartType7"] = "extratype" + particle_metadata.particle_name_class["PartType7"] = "Extratype" + particle_metadata.particle_name_text["PartType7"] = "Extratype" + writer_required_fields.extratype = { + "smoothing_lengths": { + "handle": "SmoothingLengths", + "dimensions": unyt.dimensions.length, + }, + **writer_required_fields._shared, + } + + +def _teardown_extra_part_type(): + """ + Tear down metadata for an extra particle type. - # Generate uniform masses as 10^6 solar masses for each particle - x.gas.masses = np.ones(n_p, dtype=float) * (1e6 * unyt.msun) + Ideally this tinkering with global variables should be refactored out. + """ + particle_metadata.particle_name_underscores.pop("PartType7") + particle_metadata.particle_name_class.pop("PartType7") + particle_metadata.particle_name_text.pop("PartType7") + del writer_required_fields.extratype - # Generate internal energy corresponding to 10^4 K - x.gas.internal_energy = ( - np.ones(n_p, dtype=float) * (1e4 * unyt.kb * unyt.K) / (1e6 * unyt.msun) + +@pytest.fixture(scope="function") +def extra_part_type(): + """Set up and tear down metadata for an extra particle type.""" + _setup_extra_part_type() + yield + _teardown_extra_part_type() + + +@pytest.fixture(scope="function") +def write_extra_part_type(): + """ + Write a dataset with extra particle type so that we can test reading it in. + + Make sure we reliably clean up global variables and written file. 
+ """ + _setup_extra_part_type() + unit_system = unyt.UnitSystem( + name="default", length_unit=unyt.cm, mass_unit=unyt.g, time_unit=unyt.s + ) + a = 0.5 + boxsize = cosmo_array( + [10, 10, 10], unyt.cm, comoving=False, scale_factor=a, scale_exponent=1 ) - # Generate initial guess for smoothing lengths based on MIPS - x.gas.generate_smoothing_lengths(boxsize=boxsize, dimension=3) + x = Writer( + unit_system=unit_system, + boxsize=boxsize, + scale_factor=a, + ) - # If IDs are not present, this automatically generates - x.write(test_filename) + x.extratype.coordinates = cosmo_array( + np.array([np.arange(10), np.zeros(10), np.zeros(10)]).astype(float).T, + unyt.cm, + comoving=False, + scale_factor=x.scale_factor, + scale_exponent=1, + ) + x.extratype.velocities = cosmo_array( + np.zeros((10, 3), dtype=float), + unyt.cm / unyt.s, + comoving=False, + scale_factor=x.scale_factor, + scale_exponent=0, + ) - # Yield the test data - yield x, test_filename + x.extratype.masses = cosmo_array( + np.ones(10, dtype=float), + unyt.g, + comoving=False, + scale_factor=x.scale_factor, + scale_exponent=0, + ) - # The file is automatically cleaned up after the test. 
- os.remove(test_filename) + x.extratype.smoothing_lengths = cosmo_array( + np.ones(10, dtype=float) * 5.0, + unyt.cm, + comoving=False, + scale_factor=x.scale_factor, + scale_exponent=1, + ) + x.write("extra_test.hdf5") + yield + os.remove("extra_test.hdf5") + _teardown_extra_part_type() diff --git a/tests/helper.py b/tests/helper.py index ce3876ea..fcf61045 100644 --- a/tests/helper.py +++ b/tests/helper.py @@ -5,7 +5,7 @@ import h5py import unyt as u from swiftsimio.subset_writer import find_links, write_metadata -from swiftsimio import mask, cosmo_array +from swiftsimio import mask, cosmo_array, Writer from swiftsimio.masks import SWIFTMask from swiftsimio._file_utils import open_path_or_handle @@ -167,3 +167,132 @@ def create_n_particle_dataset( ] = num_parts return + + +def create_minimal_writer(a: float = 1.0, n_p: int = 32**3, lbox: float = 100): + """ + Create a :class:`~swiftsimio.snapshot_writer.Writer` with required gas fields. + + Parameters + ---------- + a : float, optional + The scale factor. + + n_p : int, optional + The number of particles. + + lbox : float, optional + The box side length in Mpc. + + Returns + ------- + ~swiftsimio.snapshot_writer.Writer + The writer with required gas fields initialized. + """ + # Box is 100 Mpc + boxsize = cosmo_array( + [lbox, lbox, lbox], + u.Mpc, + comoving=True, + scale_factor=a, + scale_exponent=1, + ) + + # Generate object. 
cosmo_units corresponds to default Gadget-oid units + # of 10^10 Msun, Mpc, and km/s + w = Writer(boxsize=boxsize, scale_factor=a) + + # Randomly spaced coordinates from 0, lbox Mpc in each direction + w.gas.coordinates = cosmo_array( + np.random.rand(n_p, 3) * lbox, + u.Mpc, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=1, + ) + + # Random velocities from 0 to 1 km/s + w.gas.velocities = cosmo_array( + np.random.rand(n_p, 3), + u.km / u.s, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=1, + ) + + # Generate uniform masses as 10^6 solar masses for each particle + w.gas.masses = cosmo_array( + np.ones(n_p, dtype=float) * 1e6, + u.solMass, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=0, + ) + + # Generate internal energy corresponding to 10^4 K + w.gas.internal_energy = cosmo_array( + np.ones(n_p, dtype=float) * 1e4 / 1e6, + u.kb * u.K / u.solMass, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=-2, + ) + + # Generate initial guess for smoothing lengths based on MIPS + w.gas.generate_smoothing_lengths() + + # IDs will be auto-generated on file write + return w + + +def create_two_type_writer(a: float = 1.0, n_p: int = 32**3, lbox: float = 100): + """ + Create a :class:`~swiftsimio.snapshot_writer.Writer` with required gas & DM fields. + + Parameters + ---------- + a : float, optional + The scale factor. + + n_p : int, optional + The number of particles. + + lbox : float, optional + The box side length in Mpc. + + Returns + ------- + ~swiftsimio.snapshot_writer.Writer + The writer with required gas and dark_matter fields initialized. 
+ """ + w = create_minimal_writer(a=a, n_p=n_p, lbox=lbox) + + # Randomly spaced coordinates from 0, lbox Mpc in each direction + w.dark_matter.coordinates = cosmo_array( + np.random.rand(n_p, 3) * lbox, + u.Mpc, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=1, + ) + + # Random velocities from 0 to 1 km/s + w.dark_matter.velocities = cosmo_array( + np.random.rand(n_p, 3), + u.km / u.s, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=1, + ) + + # Generate uniform masses as 10^6 solar masses for each particle + w.dark_matter.masses = cosmo_array( + np.ones(n_p, dtype=float) * 1e6, + u.solMass, + comoving=True, + scale_factor=w.scale_factor, + scale_exponent=0, + ) + + # IDs will be auto-generated on file write + return w diff --git a/tests/test_basic.py b/tests/test_basic.py deleted file mode 100644 index f8975c39..00000000 --- a/tests/test_basic.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Basic integration test.""" - -from swiftsimio import load -from swiftsimio import Writer - -import unyt -import numpy as np - -import os - - -def test_write(): - """Create a sample dataset. Should not crash.""" - # Box is 100 Mpc - boxsize = 100 * unyt.Mpc - - # Generate object. cosmo_units corresponds to default Gadget-oid units - # of 10^10 Msun, Mpc, and km/s - x = Writer("galactic", boxsize) - - # 32^3 particles. 
- n_p = 32**3 - - # Randomly spaced coordinates from 0, 100 Mpc in each direction - x.gas.coordinates = np.random.rand(n_p, 3) * (100 * unyt.Mpc) - - # Random velocities from 0 to 1 km/s - x.gas.velocities = np.random.rand(n_p, 3) * (unyt.km / unyt.s) - - # Generate uniform masses as 10^6 solar masses for each particle - x.gas.masses = np.ones(n_p, dtype=float) * (1e6 * unyt.msun) - - # Generate internal energy corresponding to 10^4 K - x.gas.internal_energy = ( - np.ones(n_p, dtype=float) * (1e4 * unyt.kb * unyt.K) / (1e6 * unyt.msun) - ) - - # Generate initial guess for smoothing lengths based on MIPS - x.gas.generate_smoothing_lengths(boxsize=boxsize, dimension=3) - - # If IDs are not present, this automatically generates - x.write("test.hdf5") - - -def test_load(): - """Try to load the dataset we just made. Should not crash.""" - x = load("test.hdf5") - - x.gas.internal_energy - x.gas.coordinates - - os.remove("test.hdf5") diff --git a/tests/test_extraparts.py b/tests/test_extraparts.py index 9823522d..01c3cdd1 100644 --- a/tests/test_extraparts.py +++ b/tests/test_extraparts.py @@ -1,78 +1,12 @@ """Test for extra particle types.""" -from swiftsimio import load, metadata, Writer -import swiftsimio.metadata.particle as swp -import swiftsimio.metadata.writer.required_fields as swmw -import swiftsimio.metadata.unit.unit_fields as swuf - +from swiftsimio import load, Writer, cosmo_array import unyt -from unyt import Unit import numpy as np - import os -def generate_units( - mass: Unit, length: Unit, time: Unit, current: Unit, temperature: Unit -) -> dict[str, dict[str, Unit]]: - """ - Generate units differently for testing. - - This function is used to override the inbuilt swiftsimio generate_units function from - metadata.unit.unit_fields. This allows the specification of a new particle type and - metadata.unit.unit_fields. This allows the specification of a new particle type and - metadata.unit.unit_fields. 
This allows the specification of a new particle type and - the values and types associated with that type. - - Parameters - ---------- - mass : Unit - The mass unit. - - length : Unit - The length unit. - - time : Unit - The time unit. - - current : Unit - The current unit. - - temperature : Unit - The temperature unit. - - Returns - ------- - dict[str, Unit] - A dictionary mapping field names to units. - """ - dict_out = swuf.generate_units(mass, length, time, current, temperature) - - extratype = { - "coordinates": length, - "masses": mass, - "particle_ids": None, - "velocities": length / time, - "potential": length * length / (time * time), - "density": mass / (length**3), - "entropy": mass * length**2 / (time**2 * temperature), - "internal_energy": (length / time) ** 2, - "smoothing_length": length, - "pressure": mass / (length * time**2), - "diffusion": None, - "sfr": mass / time, - "temperature": temperature, - "viscosity": None, - "specific_sfr": 1 / time, - "material_id": None, - "radiated_energy": mass * (length / time) ** 2, - } - - dict_out["extratype"] = extratype - return dict_out - - -def test_write(): +def test_write(extra_part_type): """ Tests whether swiftsimio can handle a new particle type. @@ -82,59 +16,63 @@ def test_write(): unit_system = unyt.UnitSystem( name="default", length_unit=unyt.cm, mass_unit=unyt.g, time_unit=unyt.s ) - # Specify a new type in the metadata - currently done by editing the dictionaries - # directly. - # TODO: Remove this terrible way of setting up different particle types. 
- swp.particle_name_underscores["PartType7"] = "extratype" - swp.particle_name_class["PartType7"] = "Extratype" - swp.particle_name_text["PartType7"] = "Extratype" - - swmw.extratype = {"smoothing_length": "SmoothingLength", **swmw._shared} - - boxsize = 10 * unyt.cm - - x = Writer(unit_system, boxsize, unit_fields_generate_units=generate_units) - - x.extratype.coordinates = np.zeros((10, 3)) * unyt.cm - for i in range(0, 10): - x.extratype.coordinates[i][0] = float(i) - - x.extratype.velocities = np.zeros((10, 3)) * unyt.cm / unyt.s + a = 0.5 + boxsize = cosmo_array( + [10, 10, 10], unyt.cm, comoving=False, scale_factor=a, scale_exponent=1 + ) - x.extratype.masses = np.ones(10, dtype=float) * unyt.g + x = Writer( + unit_system=unit_system, + boxsize=boxsize, + scale_factor=a, + ) - x.extratype.smoothing_length = np.ones(10, dtype=float) * (5.0 * unyt.cm) + x.extratype.coordinates = cosmo_array( + np.array([np.arange(10), np.zeros(10), np.zeros(10)]).astype(float).T, + unyt.cm, + comoving=False, + scale_factor=x.scale_factor, + scale_exponent=1, + ) + x.extratype.velocities = cosmo_array( + np.zeros((10, 3), dtype=float), + unyt.cm / unyt.s, + comoving=False, + scale_factor=x.scale_factor, + scale_exponent=0, + ) - x.write("extra_test.hdf5") + x.extratype.masses = cosmo_array( + np.ones(10, dtype=float), + unyt.g, + comoving=False, + scale_factor=x.scale_factor, + scale_exponent=0, + ) - # Clean up these global variables we screwed around with... 
- swp.particle_name_underscores.pop("PartType7") - swp.particle_name_class.pop("PartType7") - swp.particle_name_text.pop("PartType7") + x.extratype.smoothing_lengths = cosmo_array( + np.ones(10, dtype=float) * 5.0, + unyt.cm, + comoving=False, + scale_factor=x.scale_factor, + scale_exponent=1, + ) + testfile = "extra_test.hdf5" + try: + x.write(testfile) + finally: + if os.path.exists(testfile): + os.remove(testfile) -def test_read(): +def test_read(write_extra_part_type): """ Test whether swiftsimio can handle a new particle type. Has a few asserts to check the data is read in correctly. """ - swp.particle_name_underscores["PartType7"] = "extratype" - swp.particle_name_class["PartType7"] = "Extratype" - swp.particle_name_text["PartType7"] = "Extratype" - - swmw.extratype = {"smoothing_length": "SmoothingLength", **swmw._shared} - - metadata.particle_fields.extratype = {**metadata.particle_fields.gas} - data = load("extra_test.hdf5") - for i in range(0, 10): - assert data.extratype.coordinates.value[i][0] == float(i) - - os.remove("extra_test.hdf5") - - # Clean up these global variables we screwed around with... 
- swp.particle_name_underscores.pop("PartType7") - swp.particle_name_class.pop("PartType7") - swp.particle_name_text.pop("PartType7") + assert data.extratype.coordinates.to_physical_value( + data.extratype.coordinates.units + )[i][0] == float(i) diff --git a/tests/test_read_ic.py b/tests/test_read_ic.py index 51207681..aa4e37f2 100644 --- a/tests/test_read_ic.py +++ b/tests/test_read_ic.py @@ -17,7 +17,7 @@ def test_reading_ic_units(simple_snapshot_data): "velocities", "masses", "internal_energy", - "smoothing_length", + "smoothing_lengths", ]: assert isinstance(getattr(data.gas, field), cosmo_array) assert np.allclose( diff --git a/tests/test_visualisation.py b/tests/test_visualisation.py index 684faff3..86502099 100644 --- a/tests/test_visualisation.py +++ b/tests/test_visualisation.py @@ -684,13 +684,13 @@ def test_equivalent_regions(self, backend, cosmological_volume_only_single_local assert fraction_within_tolerance( edge_img[box_res // 4 :, : box_res // 2][edge_mask], ref_img[: 3 * box_res // 4, box_res // 2 :][edge_mask], - frac={"nearest_neighbours": 0.75}.get(backend, 1.0), + frac={"nearest_neighbours": 0.5}.get(backend, 1.0), ) assert np.allclose(far_img, ref_img) assert fraction_within_tolerance( big_img, np.concatenate([np.hstack([ref_img] * 3)] * 3, axis=1), - frac={"nearest_neighbours": 0.75}.get(backend, 1.0), + frac={"nearest_neighbours": 0.5}.get(backend, 1.0), ) assert np.allclose(neg_img, ref_img) assert np.allclose(wrap_img, ref_img) diff --git a/tests/test_writer.py b/tests/test_writer.py new file mode 100644 index 00000000..71e2e25b --- /dev/null +++ b/tests/test_writer.py @@ -0,0 +1,483 @@ +"""Test the :class:`~swiftsimio.snapshot_writer.Writer`.""" + +import os +import pytest +import numpy as np +from swiftsimio import load, cosmo_array, cosmo_quantity, Writer +from .helper import create_minimal_writer +import unyt as u + + +def test_write(simple_snapshot_data): + """Create a sample dataset. 
Should not crash.""" + _, testfile = simple_snapshot_data + assert os.path.isfile(testfile) + + +def test_load(simple_snapshot_data): + """Try to load a dataset made by the writer. Should not crash.""" + _, testfile = simple_snapshot_data + dat = load(testfile) + dat.gas.internal_energy + dat.gas.coordinates + + +def test_scale_factor_written(simple_snapshot_data): + """Check that the scale factor gets written in the output.""" + testfile = "scale_factor_written.hdf5" + w = create_minimal_writer(a=0.5) + w.write(testfile) + dat = load(testfile) + assert w.scale_factor != 1.0 # make sure it's not just a default value + assert dat.metadata.a == w.scale_factor + + +def test_write_non_required_field(simple_writer): + """ + Try to write a non-required field with the writer. + + Expectation is that it is silently ignored and not written out. Written file should + still be readable. + + May be desirable to change this behaviour in the future, perhaps using TypedDict. + """ + testfile = "write_non_required_field.hdf5" + simple_writer.gas.metallicity = cosmo_array( + np.ones(simple_writer.gas.masses.size), + u.dimensionless, + comoving=True, + scale_factor=simple_writer.scale_factor, + scale_exponent=0.0, + ) + try: + simple_writer.write(testfile) + dat = load(testfile) + with pytest.raises( + AttributeError, match="'GasDataset' object has no attribute 'metallicity'" + ): + dat.gas.metallicity + finally: + if os.path.exists(testfile): + os.remove(testfile) + + +def test_write_missing_required(simple_writer): + """ + Try to write with a required field missing. + + Expectation is to raise with a helpful message. + """ + testfile = "write_missing_required.hdf5" + del simple_writer.gas.coordinates + try: + with pytest.raises( + AttributeError, match="Required dataset coordinates is None." 
+ ): + simple_writer.write(testfile) + finally: + # expect not to write a file but clean up if the unexpected happens + if os.path.exists(testfile): + os.remove(testfile) + + +class TestSetterInputs: + """Tests for the setter methods with valid and invalid inputs.""" + + def test_setter_valid_input(self): + """Make sure setter accepts valid input and performs expected conversions.""" + a = 0.5 + w = Writer( + boxsize=cosmo_array( + [100, 100, 100], u.Mpc, comoving=True, scale_factor=a, scale_exponent=1 + ), + scale_factor=a, + ) + input_units = u.kpc + input_values = np.arange(300, dtype=float).reshape((100, 3)) + expected_units = w.unit_system[u.dimensions.length] + # a valid input, should be accepted: + w.gas.coordinates = cosmo_array( + input_values.copy(), # copy else we modify through this view + input_units, + comoving=False, + scale_factor=a, + scale_exponent=1.0, + ) + # check that we converted units + assert input_units != expected_units + assert np.allclose( + w.gas.coordinates.to_physical_value(input_units), input_values + ) + # check that we converted to comoving + assert w.gas.coordinates.cosmo_factor.a_factor != 1.0 # else this is trivial + assert w.gas.coordinates.comoving + assert np.allclose( + w.gas.coordinates.to_comoving_value(input_units), + input_values / w.gas.coordinates.cosmo_factor.a_factor, + ) + + def test_setter_invalid_shape_input(self, simple_writer): + """ + Try to set a dataset with a transposed coordinate array. + + Should be rejected on attempted write. 
+ """ + del simple_writer.gas.coordinates + n_p = simple_writer.gas.masses.size + simple_writer.gas.coordinates = cosmo_array( + np.arange(3 * n_p, dtype=float).reshape( + (3, n_p) + ), # (n_p, 3) is valid shape + u.Mpc, + comoving=True, + scale_factor=simple_writer.scale_factor, + scale_exponent=1.0, + ) + testfile = "setter_invalid_coordinate_shape_input.hdf5" + try: + with pytest.raises( + AssertionError, + match="Arrays passed to gas dataset are not of the same size.", + ): + simple_writer.write(testfile) + finally: + # expect not to write a file but clean up if the unexpected happens + if os.path.exists(testfile): + os.remove(testfile) + + def test_setter_invalid_dimensions_input(self): + """Make sure setter refuses input with invalid units.""" + a = 0.5 + w = Writer( + boxsize=cosmo_array( + [100, 100, 100], u.Mpc, comoving=True, scale_factor=a, scale_exponent=1 + ), + scale_factor=a, + ) + testfile = "setter_invalid_dimensions_input.hdf5" + try: + with pytest.raises( + u.exceptions.InvalidUnitEquivalence, match="The unit equivalence" + ): + w.gas.masses = cosmo_array( + np.ones(100), + u.Mpc, + comoving=True, + scale_factor=a, + scale_exponent=1.0, + ) + finally: + # expect not to write a file but clean up if the unexpected happens + if os.path.exists(testfile): + os.remove(testfile) + + def test_setter_invalid_scale_factor_input(self): + """Make sure setter refuses input with mismatched scale factor.""" + a = 0.5 + w = Writer( + boxsize=cosmo_array( + [100, 100, 100], u.Mpc, comoving=True, scale_factor=a, scale_exponent=1 + ), + scale_factor=a, + ) + testfile = "setter_invalid_scale_factor_input.hdf5" + a_input = a + 0.1 + assert a_input != a + try: + with pytest.raises(AssertionError, match="The scale factor of masses"): + w.gas.masses = cosmo_array( + np.ones(100), + u.solMass, + comoving=True, + scale_factor=a_input, + scale_exponent=1.0, + ) + finally: + # expect not to write a file but clean up if the unexpected happens + if os.path.exists(testfile): + 
os.remove(testfile) + + def test_setter_unyt_array_input(self): + """Make sure setter refuses unyt_array (not cosmo_array) input.""" + a = 0.5 + w = Writer( + boxsize=cosmo_array( + [100, 100, 100], u.Mpc, comoving=True, scale_factor=a, scale_exponent=1 + ), + scale_factor=a, + ) + testfile = "setter_unyt_array_input.hdf5" + try: + with pytest.raises( + TypeError, match="Provide masses as swiftsimio.cosmo_array." + ): + w.gas.masses = np.ones(100) * u.solMass + finally: + if os.path.exists(testfile): + os.remove(testfile) + + def test_setter_ndarray_input(self): + """Make sure setter refuses numpy array (not cosmo_array) input.""" + a = 0.5 + w = Writer( + boxsize=cosmo_array( + [100, 100, 100], u.Mpc, comoving=True, scale_factor=a, scale_exponent=1 + ), + scale_factor=a, + ) + testfile = "setter_ndarray_input.hdf5" + try: + with pytest.raises( + TypeError, match="Provide masses as swiftsimio.cosmo_array." + ): + w.gas.masses = np.ones(100) + finally: + if os.path.exists(testfile): + os.remove(testfile) + + @pytest.mark.parametrize( + "input_data_generator", + ( + lambda n_p: np.arange(1, n_p + 1, dtype=int), + lambda n_p: np.arange(1, n_p + 1, dtype=int) * u.dimensionless, + lambda n_p: cosmo_array( + np.arange(1, n_p + 1, dtype=int), + u.dimensionless, + comoving=True, + scale_factor=1.0, + scale_exponent=0, + ), + ), + ) + def test_setter_dimensionless_input(self, simple_writer, input_data_generator): + """Check that fields with expected dimensionless input are accepted.""" + n_p = simple_writer.gas.masses.size + input_data = input_data_generator(n_p) + if hasattr(input_data, "cosmo_factor"): + assert input_data.cosmo_factor.scale_factor == simple_writer.scale_factor + testfile = "setter_dimensionless_input.hdf5" + assert simple_writer.gas._particle_ids is None + try: + simple_writer.gas.particle_ids = input_data + assert isinstance(simple_writer.gas.particle_ids, cosmo_array) + assert ( + simple_writer.gas.particle_ids.cosmo_factor.scale_factor + == 
simple_writer.scale_factor + ) + simple_writer.write(testfile) + finally: + if os.path.exists(testfile): + os.remove(testfile) + + +@pytest.mark.parametrize( + "input_data_generator", + ( + lambda n_p: np.arange(n_p, dtype=int) + 1000, + lambda n_p: (np.arange(n_p, dtype=int) + 1000) * u.dimensionless, + lambda n_p: cosmo_array( + np.arange(n_p, dtype=int) + 1000, + u.dimensionless, + comoving=True, + scale_factor=1.0, + scale_exponent=0, + ), + ), +) +def test_explicit_particle_ids(input_data_generator, simple_writer): + """ + Check that particle IDs can be set directly (i.e. not auto-generated). + + The IDs used in this test are offset by 1000 to make sure we didn't use auto-generated + IDs instead of the ones explicitly provided. + """ + testfile = "explicit_particle_ids.hdf5" + assert simple_writer.gas._particle_ids is None + n_p = simple_writer.gas.masses.size + input_data = input_data_generator(n_p) + try: + simple_writer.gas.particle_ids = input_data + assert isinstance(simple_writer.gas.particle_ids, cosmo_array) + assert ( + simple_writer.gas.particle_ids.cosmo_factor.scale_factor + == simple_writer.scale_factor + ) + simple_writer.write(testfile) + dat = load(testfile) + assert ( + dat.gas.particle_ids.to_comoving_value(u.dimensionless).astype(int) + == input_data.view(np.ndarray) + ).all() + finally: + if os.path.exists(testfile): + os.remove(testfile) + + +def test_duplicate_ids_invalid(simple_writer): + """Check that duplicate particle IDs are rejected.""" + assert simple_writer.gas._particle_ids is None + with pytest.raises( + ValueError, match="gas.particle_ids must not have repeated IDs." 
+ ): + simple_writer.gas.particle_ids = np.ones(simple_writer.gas.masses.size) + + +def test_duplicate_ids_across_types_invalid(two_type_writer): + """Check that duplicate particle IDs in different particle types are rejected.""" + testfile = "duplicate_ids_across_types_invalid.hdf5" + assert two_type_writer.gas._particle_ids is None + assert two_type_writer.dark_matter._particle_ids is None + two_type_writer.gas.particle_ids = np.arange(1, two_type_writer.gas.masses.size + 1) + two_type_writer.dark_matter.particle_ids = np.arange( + 1, two_type_writer.dark_matter.masses.size + 1 + ) + try: + with pytest.raises( + ValueError, match="Particle IDs must be unique across all particle types." + ): + two_type_writer.write(testfile) + finally: + # expect not to write a file but clean up if the unexpected happens + if os.path.exists(testfile): + os.remove(testfile) + + +@pytest.mark.parametrize("invalid_id", (0, -1)) +def test_invalid_ids(invalid_id, simple_writer): + """Check that a particle ID with a 0 or negative value is rejected.""" + invalid_ids = np.arange(1, simple_writer.gas.masses.size + 1) # valid for now + invalid_ids[0] = invalid_id # now invalid + assert np.unique(invalid_ids).size == invalid_ids.size + with pytest.raises(ValueError, match="gas.particle_ids must be >= 1."): + simple_writer.gas.particle_ids = invalid_ids + + +class TestGeneratedParticleIDs: + """Test automatically generated particle IDs.""" + + def test_generated_particle_ids(self, simple_writer): + """Check that particle ID generation produces sensible IDs.""" + testfile = "generated_particle_ids.hdf5" + assert simple_writer.gas._particle_ids is None + assert simple_writer.gas.check_consistent() + assert simple_writer.gas.requires_particle_ids_before_write + try: + simple_writer.write(testfile) + dat = load(testfile) + assert dat.gas.particle_ids.size == simple_writer.gas.masses.size + assert np.unique(dat.gas.particle_ids).size == dat.gas.particle_ids.size + finally: + if 
os.path.exists(testfile): + os.remove(testfile) + + def test_generated_particle_ids_multiple_types(self, two_type_writer): + """Check that all IDs are unique for multiple types.""" + testfile = "generated_particle_ids_multiple_types.hdf5" + assert two_type_writer.gas._particle_ids is None + assert two_type_writer.dark_matter._particle_ids is None + try: + two_type_writer.write(testfile) + dat = load(testfile) + assert ( + np.unique( + np.concatenate((dat.gas.particle_ids, dat.dark_matter.particle_ids)) + ).size + == dat.gas.masses.size + dat.dark_matter.masses.size + ) + finally: + if os.path.exists(testfile): + os.remove(testfile) + + def test_overwrite_particle_ids(self, two_type_writer): + """ + Check that we overwrite particle_ids when they are not present for all types. + + If user provides IDs for some types and not others we don't bother working out + what IDs are available to assign to particles that don't have IDs, we just + overwrite everything. However, if we overwrite any user-provided IDs, we should + warn.
+ """ + testfile = "overwrite_particle_ids.hdf5" + two_type_writer.gas.particle_ids = np.arange( + 1, two_type_writer.gas.masses.size + 1 + ) + assert two_type_writer.dark_matter._particle_ids is None + try: + with pytest.warns(RuntimeWarning, match="Overwriting gas.particle_ids"): + two_type_writer.write(testfile) + finally: + if os.path.exists(testfile): + os.remove(testfile) + + +def test_explicit_smoothing_lengths(simple_writer): + """Check that smoothing lengths can be explicitly passed in.""" + testfile = "explicit_smoothing_lengths.hdf5" + simple_writer.gas._smoothing_lengths = None # ensure they are blank + simple_writer.gas.smoothing_lengths = cosmo_array( + np.ones(simple_writer.gas.masses.size), + u.kpc, + comoving=True, + scale_factor=simple_writer.scale_factor, + scale_exponent=1, + ) + try: + simple_writer.write(testfile) + dat = load(testfile) + assert np.allclose( + dat.gas.smoothing_lengths, simple_writer.gas.smoothing_lengths + ) + finally: + if os.path.exists(testfile): + os.remove(testfile) + + +def test_generated_smoothing_lengths(two_type_writer): + """ + Check that we can automatically generate smoothing lengths. + + This should only work for types where smoothing lengths are a required field. 
+ """ + testfile = "generated_smoothing_lengths.hdf5" + two_type_writer.gas._smoothing_lengths = None # ensure they are blank + n_gas = two_type_writer.gas.coordinates.shape[0] + assert (np.diff(two_type_writer.boxsize) == 0).all() + gas_mips = two_type_writer.boxsize[0] / n_gas ** (1 / two_type_writer.dimension) + two_type_writer.gas.generate_smoothing_lengths() + assert np.allclose(two_type_writer.gas.smoothing_lengths, 2 * gas_mips) + assert two_type_writer.gas._smoothing_lengths is not None + with pytest.raises( + RuntimeError, + match="Cannot generate smoothing lengths for particle types that don't require " + "them.", + ): + two_type_writer.dark_matter.generate_smoothing_lengths() + try: + two_type_writer.write(testfile) + dat = load(testfile) + assert np.allclose( + dat.gas.smoothing_lengths, two_type_writer.gas.smoothing_lengths + ) + finally: + if os.path.exists(testfile): + os.remove(testfile) + + +@pytest.mark.parametrize( + "boxsize", + [ + cosmo_array( + [100, 100, 100], u.Mpc, comoving=True, scale_factor=1.0, scale_exponent=1 + ), + cosmo_quantity(100, u.Mpc, comoving=True, scale_factor=1.0, scale_exponent=1), + ], +) +def test_mismatched_boxsize_dimension(boxsize): + """Check that we refuse a boxsize that differs in length from dimension.""" + a = boxsize.cosmo_factor.scale_factor + with pytest.raises(ValueError, match="boxsize must"): + Writer( + boxsize=boxsize, + dimension=2, + scale_factor=a, + )