diff --git a/CMakeLists.txt b/CMakeLists.txt index bd888f84b5..dba77d38be 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -826,6 +826,7 @@ if(openPMD_BUILD_TESTING) test/Files_SerialIO/close_and_reopen_test.cpp test/Files_SerialIO/filebased_write_test.cpp test/Files_SerialIO/issue_1744_unique_ptrs_at_close_time.cpp + test/Files_SerialIO/components_without_extent.cpp ) elseif(${test_name} STREQUAL "ParallelIO" AND openPMD_HAVE_MPI) list(APPEND ${out_list} diff --git a/docs/source/details/backendconfig.rst b/docs/source/details/backendconfig.rst index 185c111526..2f64366326 100644 --- a/docs/source/details/backendconfig.rst +++ b/docs/source/details/backendconfig.rst @@ -122,6 +122,11 @@ Or in a Series constructor JSON/TOML configuration: "hint_lazy_parsing_timeout": 20 } +As of openPMD-api 0.17.0, the parser verifies that all records within a mesh or within a particle species have consistent shapes / extents. +This is used for filling in the shape for constant components that do not define it. +In order to skip this check in the error case, the key ``{"verify_homogeneous_extents": false}`` may be set (alternatively ``export OPENPMD_VERIFY_HOMOGENEOUS_EXTENTS=0`` will do the same). +This will help read datasets with inconsistent metadata definitions. + The key ``resizable`` can be passed to ``Dataset`` options. It if set to ``{"resizable": true}``, this declares that it shall be allowed to increased the ``Extent`` of a ``Dataset`` via ``resetDataset()`` at a later time, i.e., after it has been first declared (and potentially written). For HDF5, resizable Datasets come with a performance penalty. diff --git a/include/openPMD/Dataset.hpp b/include/openPMD/Dataset.hpp index 80513683f9..e45605903d 100644 --- a/include/openPMD/Dataset.hpp +++ b/include/openPMD/Dataset.hpp @@ -54,9 +54,21 @@ class Dataset */ JOINED_DIMENSION = std::numeric_limits::max(), /** - * Some backends (i.e. 
JSON and TOML in template mode) support the - creation of dataset with undefined datatype and extent. - The extent should be given as {UNDEFINED_EXTENT} for that. + * In some use cases, the extent need not be specified. + * For these, specify Extent{UNDEFINED_EXTENT}. + * Use cases: + * + * 1. Some backends (i.e. JSON and TOML in template mode) support the + * creation of datasets with undefined datatype and extent. + * The extent should be given as {UNDEFINED_EXTENT} for that. + * 2. With openPMD 2.0, the shape of constant components may be omitted + * in writing if it is defined somewhere else as part + * of the same Mesh / Species. + * (https://github.com/openPMD/openPMD-standard/pull/289) + * When reading such datasets, the openPMD-api will try to fill in + * the missing extents, so the extent for consistently-defined + * datasets should ideally not be reported by the read-side API + * as undefined. */ UNDEFINED_EXTENT = std::numeric_limits::max() - 1 }; @@ -87,5 +99,8 @@ class Dataset std::optional joinedDimension() const; static std::optional joinedDimension(Extent const &); + + bool undefinedExtent() const; + static bool undefinedExtent(Extent const &); }; } // namespace openPMD diff --git a/include/openPMD/IO/AbstractIOHandler.hpp b/include/openPMD/IO/AbstractIOHandler.hpp index 29b3de8bff..681343c0df 100644 --- a/include/openPMD/IO/AbstractIOHandler.hpp +++ b/include/openPMD/IO/AbstractIOHandler.hpp @@ -314,6 +314,7 @@ class AbstractIOHandler internal::SeriesStatus m_seriesStatus = internal::SeriesStatus::Default; IterationEncoding m_encoding = IterationEncoding::groupBased; OpenpmdStandard m_standard = auxiliary::parseStandard(getStandardDefault()); + bool m_verify_homogeneous_extents = true; }; // AbstractIOHandler } // namespace openPMD diff --git a/include/openPMD/IO/IOTask.hpp b/include/openPMD/IO/IOTask.hpp index 4c82cee174..08ee29bfff 100644 --- a/include/openPMD/IO/IOTask.hpp +++ b/include/openPMD/IO/IOTask.hpp @@ -22,6 +22,7 @@ #include 
"openPMD/ChunkInfo.hpp" #include "openPMD/Dataset.hpp" +#include "openPMD/Error.hpp" #include "openPMD/IterationEncoding.hpp" #include "openPMD/Streaming.hpp" #include "openPMD/auxiliary/Export.hpp" diff --git a/include/openPMD/Record.hpp b/include/openPMD/Record.hpp index 791c4c15f8..246b458b5f 100644 --- a/include/openPMD/Record.hpp +++ b/include/openPMD/Record.hpp @@ -53,7 +53,8 @@ class Record : public BaseRecord void flush_impl(std::string const &, internal::FlushParams const &) override; - void read(); + + [[nodiscard]] internal::HomogenizeExtents read(); }; // Record template diff --git a/include/openPMD/RecordComponent.hpp b/include/openPMD/RecordComponent.hpp index f319e10cff..d06b4213f4 100644 --- a/include/openPMD/RecordComponent.hpp +++ b/include/openPMD/RecordComponent.hpp @@ -158,6 +158,8 @@ class RecordComponent : public BaseRecordComponent * * Shrinking any dimension's extent. * * Changing the number of dimensions. * + * The dataset extent may be empty to indicate undefined extents. 
+ * * Backend support for resizing datasets: * * JSON: Supported * * ADIOS2: Supported as of ADIOS2 2.7.0 @@ -540,6 +542,26 @@ OPENPMD_protected void verifyChunk(Datatype, Offset const &, Extent const &) const; }; // RecordComponent +namespace internal +{ + // Must put this after the definition of RecordComponent due to the + // deque + struct HomogenizeExtents + { + std::deque without_extent; + std::optional retrieved_extent; + bool verify_homogeneous_extents = true; + + explicit HomogenizeExtents(); + HomogenizeExtents(bool verify_homogeneous_extents); + + void check_extent(Attributable const &callsite, RecordComponent &); + auto merge(Attributable const &callsite, HomogenizeExtents) + -> HomogenizeExtents &; + void homogenize(Attributable const &callsite) &&; + }; +} // namespace internal + } // namespace openPMD #include "openPMD/UndefDatatypeMacros.hpp" diff --git a/include/openPMD/backend/Attributable.hpp b/include/openPMD/backend/Attributable.hpp index d34d5bb48f..8c519a9e12 100644 --- a/include/openPMD/backend/Attributable.hpp +++ b/include/openPMD/backend/Attributable.hpp @@ -20,6 +20,7 @@ */ #pragma once +#include "openPMD/Error.hpp" #include "openPMD/IO/AbstractIOHandler.hpp" #include "openPMD/ThrowError.hpp" #include "openPMD/auxiliary/OutOfRangeMsg.hpp" @@ -29,6 +30,7 @@ #include #include #include +#include #include #include #include @@ -54,6 +56,7 @@ namespace internal { class IterationData; class SeriesData; + struct HomogenizeExtents; class SharedAttributableData { @@ -105,6 +108,7 @@ namespace internal friend class openPMD::Attributable; using SharedData_t = std::shared_ptr; + using A_MAP = SharedData_t::element_type::A_MAP; public: AttributableData(); @@ -155,6 +159,32 @@ namespace internal std::shared_ptr(self, [](auto const *) {})); return res; } + + inline auto attributes() -> A_MAP & + { + return operator*().m_attributes; + } + [[nodiscard]] inline auto attributes() const -> A_MAP const & + { + return operator*().m_attributes; + } + 
[[nodiscard]] inline auto readAttribute(std::string const &name) const + -> Attribute const & + { + auto const &attr = attributes(); + if (auto it = attr.find(name); it != attr.end()) + { + return it->second; + } + else + { + throw error::ReadError( + error::AffectedObject::Attribute, + error::Reason::NotFound, + std::nullopt, + "Not found: '" + name + "'."); + } + } }; template @@ -213,6 +243,7 @@ class Attributable friend class StatefulSnapshotsContainer; friend class internal::AttributableData; friend class Snapshots; + friend struct internal::HomogenizeExtents; protected: // tag for internal constructor diff --git a/share/openPMD/json_schema/record_component.toml b/share/openPMD/json_schema/record_component.toml index 9a4741a9f2..09e6bca54e 100644 --- a/share/openPMD/json_schema/record_component.toml +++ b/share/openPMD/json_schema/record_component.toml @@ -44,7 +44,10 @@ title = "Either array or constant" [allOf.if] required = ["attributes"] -properties.attributes.required = ["shape", "value"] +# shape optional as of https://github.com/openPMD/openPMD-api/pull/1661 +# JSON schema not powerful enough to check that the shape must be defined +# at least once (just like we do not check that the shape is consistent). 
+properties.attributes.required = ["value"] [allOf.then] title = "Constant dataset" diff --git a/src/Dataset.cpp b/src/Dataset.cpp index a56c566805..f0c39cfc9d 100644 --- a/src/Dataset.cpp +++ b/src/Dataset.cpp @@ -95,4 +95,13 @@ std::optional Dataset::joinedDimension(Extent const &extent) } return res; } + +bool Dataset::undefinedExtent() const +{ + return undefinedExtent(extent); +} +bool Dataset::undefinedExtent(Extent const &e) +{ + return e.size() == 1 && e.at(0) == Dataset::UNDEFINED_EXTENT; +} } // namespace openPMD diff --git a/src/IO/ADIOS/ADIOS2IOHandler.cpp b/src/IO/ADIOS/ADIOS2IOHandler.cpp index 5d3ce17f36..b5583da9c7 100644 --- a/src/IO/ADIOS/ADIOS2IOHandler.cpp +++ b/src/IO/ADIOS/ADIOS2IOHandler.cpp @@ -833,6 +833,12 @@ void ADIOS2IOHandlerImpl::createDataset( "only is not possible."); } + if (Dataset::undefinedExtent(parameters.extent)) + { + throw error::OperationUnsupportedInBackend( + "ADIOS2", "No support for Datasets with undefined extent."); + } + if (!writable->written) { /* Sanitize name */ @@ -993,6 +999,12 @@ void ADIOS2IOHandlerImpl::extendDataset( VERIFY_ALWAYS( access::write(m_handler->m_backendAccess), "[ADIOS2] Cannot extend datasets in read-only mode."); + if (Dataset::undefinedExtent(parameters.extent)) + { + throw error::OperationUnsupportedInBackend( + "ADIOS2", "No support for Datasets with undefined extent."); + } + setAndGetFilePosition(writable); auto file = refreshFileFromParent(writable, /* preferParentFile = */ false); std::string name = nameOfVariable(writable); diff --git a/src/IO/HDF5/HDF5IOHandler.cpp b/src/IO/HDF5/HDF5IOHandler.cpp index 633dff1edc..7c874e61d8 100644 --- a/src/IO/HDF5/HDF5IOHandler.cpp +++ b/src/IO/HDF5/HDF5IOHandler.cpp @@ -834,6 +834,11 @@ void HDF5IOHandlerImpl::createDataset( error::throwOperationUnsupportedInBackend( "HDF5", "Joined Arrays currently only supported in ADIOS2"); } + else if (Dataset::undefinedExtent(parameters.extent)) + { + throw error::OperationUnsupportedInBackend( + "HDF5", "No 
support for Datasets with undefined extent."); + } if (!writable->written) { @@ -1114,6 +1119,11 @@ void HDF5IOHandlerImpl::extendDataset( error::throwOperationUnsupportedInBackend( "HDF5", "Joined Arrays currently only supported in ADIOS2"); } + else if (Dataset::undefinedExtent(parameters.extent)) + { + throw error::OperationUnsupportedInBackend( + "HDF5", "No support for Datasets with undefined extent."); + } File file = requireFile("extendDataset", writable, /* checkParent = */ true); diff --git a/src/Iteration.cpp b/src/Iteration.cpp index ba04e73981..eec2785888 100644 --- a/src/Iteration.cpp +++ b/src/Iteration.cpp @@ -676,6 +676,9 @@ void Iteration::readMeshes(std::string const &meshesPath) IOHandler()->enqueue(IOTask(&m, aList)); IOHandler()->flush(internal::defaultFlushParams); + // Find constant scalar meshes. shape generally required for meshes, + // shape also required for scalars. + // https://github.com/openPMD/openPMD-standard/pull/289 auto att_begin = aList.attributes->begin(); auto att_end = aList.attributes->end(); auto value = std::find(att_begin, att_end, "value"); diff --git a/src/Mesh.cpp b/src/Mesh.cpp index cfd1735898..6b38e7ef4f 100644 --- a/src/Mesh.cpp +++ b/src/Mesh.cpp @@ -21,6 +21,7 @@ #include "openPMD/Mesh.hpp" #include "openPMD/Error.hpp" #include "openPMD/IO/AbstractIOHandler.hpp" +#include "openPMD/RecordComponent.hpp" #include "openPMD/Series.hpp" #include "openPMD/ThrowError.hpp" #include "openPMD/UnitDimension.hpp" @@ -434,6 +435,8 @@ void Mesh::flush_impl( void Mesh::read() { + internal::HomogenizeExtents homogenizeExtents( + IOHandler()->m_verify_homogeneous_extents); internal::EraseStaleEntries map{*this}; using DT = Datatype; @@ -604,6 +607,7 @@ void Mesh::read() if (scalar()) { T_RecordComponent::read(); + homogenizeExtents.check_extent(*this, *this); } else { @@ -628,7 +632,9 @@ void Mesh::read() << "' and will skip it due to read error:\n" << err.what() << std::endl; map.forget(component); + continue; } + 
homogenizeExtents.check_extent(*this, rc); } Parameter dList; @@ -655,10 +661,14 @@ void Mesh::read() << "' and will skip it due to read error:\n" << err.what() << std::endl; map.forget(component); + continue; } + homogenizeExtents.check_extent(*this, rc); } } + std::move(homogenizeExtents).homogenize(*this); + readBase(); readAttributes(ReadMode::FullyReread); diff --git a/src/ParticleSpecies.cpp b/src/ParticleSpecies.cpp index 99ec6dfe6d..0476cb419e 100644 --- a/src/ParticleSpecies.cpp +++ b/src/ParticleSpecies.cpp @@ -19,6 +19,7 @@ * If not, see . */ #include "openPMD/ParticleSpecies.hpp" +#include "openPMD/RecordComponent.hpp" #include "openPMD/Series.hpp" #include "openPMD/auxiliary/DerefDynamicCast.hpp" #include "openPMD/backend/Writable.hpp" @@ -35,6 +36,8 @@ ParticleSpecies::ParticleSpecies() void ParticleSpecies::read() { + internal::HomogenizeExtents homogenizeExtents( + IOHandler()->m_verify_homogeneous_extents); /* obtain all non-scalar records */ Parameter pList; IOHandler()->enqueue(IOTask(this, pList)); @@ -76,17 +79,17 @@ void ParticleSpecies::read() auto att_begin = aList.attributes->begin(); auto att_end = aList.attributes->end(); auto value = std::find(att_begin, att_end, "value"); - auto shape = std::find(att_begin, att_end, "shape"); - if (value != att_end && shape != att_end) + if (value != att_end) { RecordComponent &rc = r; IOHandler()->enqueue(IOTask(&rc, pOpen)); IOHandler()->flush(internal::defaultFlushParams); rc.get().m_isConstant = true; } + internal::HomogenizeExtents recordExtents; try { - r.read(); + recordExtents = r.read(); } catch (error::ReadError const &err) { @@ -95,7 +98,9 @@ void ParticleSpecies::read() << err.what() << std::endl; map.forget(record_name); + continue; } + homogenizeExtents.merge(*this, std::move(recordExtents)); } } @@ -115,6 +120,7 @@ void ParticleSpecies::read() Parameter dOpen; for (auto const &record_name : *dList.datasets) { + internal::HomogenizeExtents recordExtents; try { Record &r = map[record_name]; 
@@ -127,7 +133,7 @@ void ParticleSpecies::read() rc.setWritten(false, Attributable::EnqueueAsynchronously::No); rc.resetDataset(Dataset(*dOpen.dtype, *dOpen.extent)); rc.setWritten(true, Attributable::EnqueueAsynchronously::No); - r.read(); + recordExtents = r.read(); } catch (error::ReadError const &err) { @@ -138,9 +144,13 @@ void ParticleSpecies::read() map.forget(record_name); //(*this)[record_name].erase(RecordComponent::SCALAR); // this->erase(record_name); + continue; } + homogenizeExtents.merge(*this, std::move(recordExtents)); } + std::move(homogenizeExtents).homogenize(*this); + readAttributes(ReadMode::FullyReread); } diff --git a/src/Record.cpp b/src/Record.cpp index c6baedec7c..25d896e901 100644 --- a/src/Record.cpp +++ b/src/Record.cpp @@ -19,7 +19,9 @@ * If not, see . */ #include "openPMD/Record.hpp" +#include "openPMD/Error.hpp" #include "openPMD/RecordComponent.hpp" +#include "openPMD/ThrowError.hpp" #include "openPMD/UnitDimension.hpp" #include "openPMD/backend/BaseRecord.hpp" @@ -107,21 +109,30 @@ void Record::flush_impl( } } -void Record::read() +auto Record::read() -> internal::HomogenizeExtents { + internal::HomogenizeExtents res(IOHandler()->m_verify_homogeneous_extents); + auto check_extent = [&](RecordComponent &rc) { + res.check_extent(*this, rc); + }; if (scalar()) { - /* using operator[] will incorrectly update parent */ - try - { - T_RecordComponent::read(/* require_unit_si = */ true); - } - catch (error::ReadError const &err) - { - std::cerr << "Cannot read scalar record component and will skip it " - "due to read error:\n" - << err.what() << std::endl; - } + [&]() { + /* using operator[] will incorrectly update parent */ + try + { + T_RecordComponent::read(/* require_unit_si = */ true); + } + catch (error::ReadError const &err) + { + std::cerr + << "Cannot read scalar record component and will skip it " + "due to read error:\n" + << err.what() << std::endl; + return; // from lambda + } + check_extent(*this); + }(); } else { @@ -146,7 
+157,9 @@ void Record::read() << "' and will skip it due to read error:\n" << err.what() << std::endl; this->container().erase(component); + continue; } + check_extent(rc); } Parameter dList; @@ -173,13 +186,16 @@ void Record::read() << "' and will skip it due to read error:\n" << err.what() << std::endl; this->container().erase(component); + continue; } + check_extent(rc); } } readBase(); readAttributes(ReadMode::FullyReread); + return res; } template class BaseRecord; diff --git a/src/RecordComponent.cpp b/src/RecordComponent.cpp index 6d11fbfa77..c61fe82c83 100644 --- a/src/RecordComponent.cpp +++ b/src/RecordComponent.cpp @@ -24,7 +24,9 @@ #include "openPMD/Error.hpp" #include "openPMD/IO/Format.hpp" #include "openPMD/Series.hpp" +#include "openPMD/auxiliary/Environment.hpp" #include "openPMD/auxiliary/Memory.hpp" +#include "openPMD/auxiliary/StringManip.hpp" #include "openPMD/backend/Attributable.hpp" #include "openPMD/backend/BaseRecord.hpp" #include "openPMD/backend/Variant_internal.hpp" @@ -59,6 +61,116 @@ namespace internal a.setDirtyRecursive(true); m_chunks.push(std::move(task)); } + + static constexpr char const *note_on_deactivating_this_check = R"( +Note: In order to ignore inconsistent / incomplete extent definitions, +set the environment variable OPENPMD_VERIFY_HOMOGENEOUS_EXTENTS=0 +or alternatively the JSON option {"verify_homogeneous_extents": false}. 
+ )"; + + HomogenizeExtents::HomogenizeExtents() = default; + HomogenizeExtents::HomogenizeExtents(bool verify_homogeneous_extents_in) + : verify_homogeneous_extents(verify_homogeneous_extents_in) + {} + + void HomogenizeExtents::check_extent( + Attributable const &callsite, RecordComponent &rc) + { + auto extent = rc.getExtent(); + if (Dataset::undefinedExtent(extent)) + { + without_extent.emplace_back(rc); + } + else if (retrieved_extent.has_value()) + { + if (verify_homogeneous_extents && extent != *retrieved_extent) + { + std::stringstream error_msg; + error_msg << "Inconsistent extents found for Record '" + << callsite.myPath().openPMDPath() << "': Component '" + << rc.myPath().openPMDPath() << "' has extent"; + auxiliary::write_vec_to_stream(error_msg, extent) << ", but "; + auxiliary::write_vec_to_stream(error_msg, *retrieved_extent) + << " was found previously." + << note_on_deactivating_this_check; + throw error::ReadError( + error::AffectedObject::Group, + error::Reason::UnexpectedContent, + std::nullopt, + error_msg.str()); + } + } + else + { + retrieved_extent = std::move(extent); + } + } + + auto HomogenizeExtents::merge( + Attributable const &callsite, HomogenizeExtents other) + -> HomogenizeExtents & + { + if (retrieved_extent.has_value() && other.retrieved_extent.has_value()) + { + if (verify_homogeneous_extents && + *retrieved_extent != *other.retrieved_extent) + { + std::stringstream error_msg; + error_msg << "Inconsistent extents found for Record '" + << callsite.myPath().openPMDPath() << "': "; + auxiliary::write_vec_to_stream(error_msg, *retrieved_extent) + << " vs. "; + auxiliary::write_vec_to_stream( + error_msg, *other.retrieved_extent) + << "." 
<< note_on_deactivating_this_check; + throw error::ReadError( + error::AffectedObject::Group, + error::Reason::UnexpectedContent, + std::nullopt, + error_msg.str()); + } + } + else if (!retrieved_extent.has_value()) + { + retrieved_extent = std::move(other.retrieved_extent); + } + + for (auto &rc : other.without_extent) + { + this->without_extent.emplace_back(std::move(rc)); + } + return *this; + } + + void HomogenizeExtents::homogenize(Attributable const &callsite) && + { + if (!retrieved_extent.has_value()) + { + if (verify_homogeneous_extents) + { + throw error::ReadError( + error::AffectedObject::Group, + error::Reason::UnexpectedContent, + std::nullopt, + "No extent found for any component contained in '" + + callsite.myPath().openPMDPath() + "'." + + note_on_deactivating_this_check); + } + else + { + return; + } + } + auto &ext = *retrieved_extent; + for (auto &rc : without_extent) + { + rc.setWritten(false, Attributable::EnqueueAsynchronously::No); + rc.resetDataset(Dataset(Datatype::UNDEFINED, ext)); + rc.setWritten(true, Attributable::EnqueueAsynchronously::No); + } + without_extent.clear(); + } + } // namespace internal template @@ -118,7 +230,14 @@ RecordComponent &RecordComponent::resetDataset(Dataset d) throw std::runtime_error("Dataset extent must be at least 1D."); if (d.empty()) { - if (d.dtype != Datatype::UNDEFINED) + if (d.extent.empty()) + { + throw error::Internal( + "A zero-dimensional dataset is not to be considered empty, but " + "undefined. 
This error is an internal safeguard against future " + "changes that might not consider this."); + } + else if (d.dtype != Datatype::UNDEFINED) { return makeEmpty(std::move(d)); } @@ -171,7 +290,7 @@ Extent RecordComponent::getExtent() const } else { - return {1}; + return {Dataset::UNDEFINED_EXTENT}; } } @@ -300,6 +419,13 @@ void RecordComponent::flush( { setUnitSI(1); } + auto constant_component_write_shape = [&]() { + auto extent = getExtent(); + return !Dataset::undefinedExtent(extent) && + std::none_of(extent.begin(), extent.end(), [](auto val) { + return val == Dataset::JOINED_DIMENSION; + }); + }; if (!written()) { if (constant()) @@ -319,16 +445,19 @@ void RecordComponent::flush( Operation::WRITE_ATT>::ChangesOverSteps::IfPossible; } IOHandler()->enqueue(IOTask(this, aWrite)); - aWrite.name = "shape"; - Attribute a(getExtent()); - aWrite.dtype = a.dtype; - aWrite.m_resource = a.getAny(); - if (isVBased) + if (constant_component_write_shape()) { - aWrite.changesOverSteps = Parameter< - Operation::WRITE_ATT>::ChangesOverSteps::IfPossible; + aWrite.name = "shape"; + Attribute a(getExtent()); + aWrite.dtype = a.dtype; + aWrite.m_resource = a.getAny(); + if (isVBased) + { + aWrite.changesOverSteps = Parameter< + Operation::WRITE_ATT>::ChangesOverSteps::IfPossible; + } + IOHandler()->enqueue(IOTask(this, aWrite)); } - IOHandler()->enqueue(IOTask(this, aWrite)); } else { @@ -343,6 +472,13 @@ void RecordComponent::flush( { if (constant()) { + if (!constant_component_write_shape()) + { + throw error::WrongAPIUsage( + "Extended constant component from a previous shape to " + "one that cannot be written (empty or with joined " + "dimension)."); + } bool isVBased = retrieveSeries().iterationEncoding() == IterationEncoding::variableBased; Parameter aWrite; @@ -410,25 +546,27 @@ namespace void RecordComponent::readBase(bool require_unit_si) { using DT = Datatype; - // auto & rc = get(); - Parameter aRead; + auto &rc = get(); - if (constant() && !empty()) - { - aRead.name 
= "value"; - IOHandler()->enqueue(IOTask(this, aRead)); - IOHandler()->flush(internal::defaultFlushParams); + readAttributes(ReadMode::FullyReread); - Attribute a(Attribute::from_any, *aRead.m_resource); - DT dtype = *aRead.dtype; + auto read_constant = [&]() { + Attribute a = rc.readAttribute("value"); + DT dtype = a.dtype; setWritten(false, Attributable::EnqueueAsynchronously::No); switchNonVectorType(dtype, *this, a); setWritten(true, Attributable::EnqueueAsynchronously::No); - aRead.name = "shape"; - IOHandler()->enqueue(IOTask(this, aRead)); - IOHandler()->flush(internal::defaultFlushParams); - a = Attribute(Attribute::from_any, *aRead.m_resource); + if (!containsAttribute("shape")) + { + setWritten(false, Attributable::EnqueueAsynchronously::No); + resetDataset(Dataset(dtype, {Dataset::UNDEFINED_EXTENT})); + setWritten(true, Attributable::EnqueueAsynchronously::No); + + return; + } + + a = rc.attributes().at("shape"); Extent e; // uint64_t check @@ -438,7 +576,7 @@ void RecordComponent::readBase(bool require_unit_si) else { std::ostringstream oss; - oss << "Unexpected datatype (" << *aRead.dtype + oss << "Unexpected datatype (" << a.dtype << ") for attribute 'shape' (" << determineDatatype() << " aka uint64_t)"; throw error::ReadError( @@ -451,9 +589,12 @@ void RecordComponent::readBase(bool require_unit_si) setWritten(false, Attributable::EnqueueAsynchronously::No); resetDataset(Dataset(dtype, e)); setWritten(true, Attributable::EnqueueAsynchronously::No); - } + }; - readAttributes(ReadMode::FullyReread); + if (constant() && !empty()) + { + read_constant(); + } if (require_unit_si) { @@ -467,7 +608,8 @@ void RecordComponent::readBase(bool require_unit_si) "'" + myPath().openPMDPath() + "'."); } - if (!getAttribute("unitSI").getOptional().has_value()) + if (auto attr = getAttribute("unitSI"); + !attr.getOptional().has_value()) { throw error::ReadError( error::AffectedObject::Attribute, @@ -475,10 +617,8 @@ void RecordComponent::readBase(bool require_unit_si) 
{}, "Unexpected Attribute datatype for 'unitSI' (expected double, " "found " + - datatypeToString( - Attribute(Attribute::from_any, *aRead.m_resource) - .dtype) + - ") in '" + myPath().openPMDPath() + "'."); + datatypeToString(attr.dtype) + ") in '" + + myPath().openPMDPath() + "'."); } } } diff --git a/src/Series.cpp b/src/Series.cpp index e71ae49c65..c491e0d3e5 100644 --- a/src/Series.cpp +++ b/src/Series.cpp @@ -213,6 +213,8 @@ struct Series::ParsedInput std::string filenamePostfix; std::optional filenameExtension; int filenamePadding = -1; + // optional fields + bool verify_homogeneous_extents = true; }; // ParsedInput std::string Series::openPMD() const @@ -933,7 +935,17 @@ void Series::init( // Either an MPI_Comm or none, the template works for both options MPI_Communicator &&...comm) { - auto init_directly = [this, &comm..., at, &filepath]( + auto emplace_parse_config_options_into_iohandler = + [](AbstractIOHandler &ioHandler, ParsedInput &input) { + ioHandler.m_verify_homogeneous_extents = + input.verify_homogeneous_extents; + }; + + auto init_directly = [this, + &comm..., + at, + &filepath, + &emplace_parse_config_options_into_iohandler]( std::unique_ptr parsed_input, json::TracingJSON tracing_json) { auto io_handler = createIOHandler( @@ -945,12 +957,17 @@ void Series::init( comm..., tracing_json, filepath); + emplace_parse_config_options_into_iohandler(*io_handler, *parsed_input); initSeries(std::move(io_handler), std::move(parsed_input)); json::warnGlobalUnusedOptions(tracing_json); }; - auto init_deferred = [this, at, &filepath, &options, &comm...]( - std::string const &parsed_directory) { + auto init_deferred = [this, + at, + &filepath, + &options, + &emplace_parse_config_options_into_iohandler, + &comm...](std::string const &parsed_directory) { // Set a temporary IOHandler so that API calls which require a present // IOHandler don't fail writable().IOHandler = @@ -960,8 +977,12 @@ void Series::init( series.iterations.linkHierarchy(writable()); 
series.m_rankTable.m_attributable.linkHierarchy(writable()); series.m_deferred_initialization = - [called_this_already = false, filepath, options, at, comm...]( - Series &s) mutable { + [called_this_already = false, + filepath, + options, + at, + emplace_parse_config_options_into_iohandler, + comm...](Series &s) mutable { if (called_this_already) { throw std::runtime_error("Must be called one time only"); @@ -991,6 +1012,8 @@ void Series::init( comm..., tracing_json, filepath); + emplace_parse_config_options_into_iohandler( + *io_handler, *parsed_input); auto res = io_handler.get(); s.initSeries(std::move(io_handler), std::move(parsed_input)); json::warnGlobalUnusedOptions(tracing_json); @@ -3143,6 +3166,11 @@ void Series::parseJsonOptions(TracingJSON &options, ParsedInput &input) "hint_lazy_parsing_timeout", series.m_hintLazyParsingAfterTimeout, "OPENPMD_HINT_LAZY_PARSING_TIMEOUT"); + getJsonOption( + options, + "verify_homogeneous_extents", + input.verify_homogeneous_extents, + "OPENPMD_VERIFY_HOMOGENEOUS_EXTENTS"); internal::SeriesData::SourceSpecifiedViaJSON rankTableSource; if (getJsonOptionLowerCase(options, "rank_table", rankTableSource.value)) { diff --git a/src/backend/PatchRecord.cpp b/src/backend/PatchRecord.cpp index e674828181..87546ca177 100644 --- a/src/backend/PatchRecord.cpp +++ b/src/backend/PatchRecord.cpp @@ -45,7 +45,7 @@ void PatchRecord::flush_impl( { return; } - if (!this->datasetDefined()) + if (!this->scalar()) { if (IOHandler()->m_frontendAccess != Access::READ_ONLY) Container::flush( diff --git a/test/Files_ParallelIO/read_variablebased_randomaccess.cpp b/test/Files_ParallelIO/read_variablebased_randomaccess.cpp index 577f19ae61..7cce50d2df 100644 --- a/test/Files_ParallelIO/read_variablebased_randomaccess.cpp +++ b/test/Files_ParallelIO/read_variablebased_randomaccess.cpp @@ -6,6 +6,7 @@ #include #include +#include #if openPMD_HAVE_ADIOS2 && openPMD_HAVE_MPI #include @@ -120,8 +121,6 @@ static void create_file_in_serial(bool 
use_group_table) "__openPMD_groups/data", step, "", "/", true); IO.DefineAttribute( "__openPMD_groups/data/meshes", step, "", "/", true); - IO.DefineAttribute( - "__openPMD_groups/data/meshes/theta", step, "", "/", true); } std::vector data{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; @@ -129,15 +128,6 @@ static void create_file_in_serial(bool use_group_table) if (step % 2 == 1) { engine.Put(variable2, data.data()); - if (use_group_table) - { - IO.DefineAttribute( - "__openPMD_groups/data/meshes/e_chargeDensity", - step, - "", - "/", - true); - } } engine.EndStep(); @@ -147,13 +137,16 @@ static void create_file_in_serial(bool use_group_table) } } -auto read_file_in_parallel(bool use_group_table) -> void +auto read_file_in_parallel( + std::optional const &dont_verify_homogeneous_extents) -> void { openPMD::Series read( "../samples/read_variablebased_randomaccess.bp", openPMD::Access::READ_ONLY, MPI_COMM_WORLD, - "adios2.engine.type = \"bp5\""); + json::merge( + "adios2.engine.type = \"bp5\"", + dont_verify_homogeneous_extents.value_or("{}"))); for (auto &[index, iteration] : read.snapshots()) { auto data = iteration.meshes["theta"].loadChunk({0}, {10}); @@ -163,42 +156,56 @@ auto read_file_in_parallel(bool use_group_table) -> void REQUIRE(data.get()[i] == int(i)); } // clang-format off - /* - * Step 0: - * uint64_t /data/snapshot attr = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9} - * Step 1: - * int32_t /data/meshes/e_chargeDensity {10} - * uint64_t /data/snapshot attr = {10, 11, 12, 13, 14, 15, 16, 17, 18, 19} - * Step 2: - * uint64_t /data/snapshot attr = {20, 21, 22, 23, 24, 25, 26, 27, 28, 29} - * Step 3: - * int32_t /data/meshes/e_chargeDensity {10} - * uint64_t /data/snapshot attr = {30, 31, 32, 33, 34, 35, 36, 37, 38, 39} - * Step 4: - * uint64_t /data/snapshot attr = {40, 41, 42, 43, 44, 45, 46, 47, 48, 49} - */ + /* + * Step 0: + * uint64_t /data/snapshot attr = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9} + * Step 1: + * int32_t /data/meshes/e_chargeDensity {10} + * uint64_t /data/snapshot attr = 
{10, 11, 12, 13, 14, 15, 16, 17, 18, 19} + * Step 2: + * uint64_t /data/snapshot attr = {20, 21, 22, 23, 24, 25, 26, 27, 28, 29} + * Step 3: + * int32_t /data/meshes/e_chargeDensity {10} + * uint64_t /data/snapshot attr = {30, 31, 32, 33, 34, 35, 36, 37, 38, 39} + * Step 4: + * uint64_t /data/snapshot attr = {40, 41, 42, 43, 44, 45, 46, 47, 48, 49} + */ // clang-format on size_t adios_step = index / 10; // 10 iterations per step bool step_has_charge_density = adios_step % 2 == 1; - if (use_group_table) + // Without a group table, the groups need to be recovered from + // attributes and variables found in the ADIOS2 file. But since the + // e_chargeDensity mesh exists only in a subselection of steps, its + // attributes will leak into the other steps, making the API see just an + // empty mesh. The behavior now depends on how strictly we are parsing: + // + // 1. If verify_homogeneous_extents == true (default): The reader will + // notice that no extent is defined anywhere, the mesh will be erased + // with a warning. + // 2. If verify_homogeneous_extents == false: An empty mesh will be + // returned. + if (!dont_verify_homogeneous_extents.has_value()) { REQUIRE( iteration.meshes.contains("e_chargeDensity") == step_has_charge_density); + if (step_has_charge_density) + { + REQUIRE(iteration.meshes["e_chargeDensity"].scalar()); + } } else { - // Without a group table, the groups need to be recovered from - // attributes and variables found in the ADIOS2 file. But since the - // e_chargeDensity mesh exists only in a subselection of steps, its - // attributes will leak into the other steps, making the API think - // that there is data where there is none. REQUIRE(iteration.meshes.contains("e_chargeDensity")); // Only when the variable is also found, the reading routines will // correctly determine that this is a scalar mesh. 
REQUIRE( iteration.meshes["e_chargeDensity"].scalar() == step_has_charge_density); + if (!step_has_charge_density) + { + REQUIRE(iteration.meshes["e_chargeDensity"].size() == 0); + } } if (step_has_charge_density) { @@ -222,13 +229,14 @@ auto read_variablebased_randomaccess() -> void create_file_in_serial(true); } MPI_Barrier(MPI_COMM_WORLD); - read_file_in_parallel(true); + // read_file_in_parallel(std::nullopt); if (rank == 0) { create_file_in_serial(false); } MPI_Barrier(MPI_COMM_WORLD); - read_file_in_parallel(false); + read_file_in_parallel(std::nullopt); + read_file_in_parallel(R"({"verify_homogeneous_extents": false})"); } } // namespace read_variablebased_randomaccess #else diff --git a/test/Files_SerialIO/SerialIOTests.hpp b/test/Files_SerialIO/SerialIOTests.hpp index f5e770681b..c534ded579 100644 --- a/test/Files_SerialIO/SerialIOTests.hpp +++ b/test/Files_SerialIO/SerialIOTests.hpp @@ -12,3 +12,7 @@ namespace issue_1744_unique_ptrs_at_close_time { auto issue_1744_unique_ptrs_at_close_time() -> void; } +namespace components_without_extent +{ +auto components_without_extent() -> void; +} diff --git a/test/Files_SerialIO/components_without_extent.cpp b/test/Files_SerialIO/components_without_extent.cpp new file mode 100644 index 0000000000..0fcc3f9e3a --- /dev/null +++ b/test/Files_SerialIO/components_without_extent.cpp @@ -0,0 +1,268 @@ +#include "SerialIOTests.hpp" + +#include "openPMD/openPMD.hpp" + +#include + +#include +#include + +namespace components_without_extent +{ +constexpr char const *filepath = "../samples/components_without_extent.json"; + +void particle_offset_without_extent() +{ + // write + { + openPMD::Series write(filepath, openPMD::Access::CREATE); + auto it0 = write.writeIterations()[0]; + auto e = it0.particles["e"]; + for (auto comp_id : {"x", "y", "z"}) + { + auto position_comp = e["position"][comp_id]; + position_comp.resetDataset({openPMD::Datatype::FLOAT, {5}}); + std::unique_ptr data{new float[5]}; + std::iota(data.get(), 
data.get() + 5, 0); + position_comp.storeChunk(std::move(data), {0}, {5}); + + auto offset_comp = e["positionOffset"][comp_id]; + offset_comp.resetDataset( + {openPMD::Datatype::INT, {openPMD::Dataset::UNDEFINED_EXTENT}}); + offset_comp.makeConstant(0); + } + write.close(); + } + + // read + { + openPMD::Series read(filepath, openPMD::Access::READ_RANDOM_ACCESS); + auto e = read.snapshots()[0].particles["e"]; + for (auto const &record : e) + { + for (auto const &component : record.second) + { + REQUIRE(component.second.getExtent() == openPMD::Extent{5}); + } + } + } +} + +void particles_without_any_extent() +{ + // write + { + openPMD::Series write(filepath, openPMD::Access::CREATE); + auto it0 = write.writeIterations()[0]; + auto e = it0.particles["e"]; + for (auto comp_id : {"x", "y", "z"}) + { + auto position_comp = e["position"][comp_id]; + position_comp.resetDataset( + {openPMD::Datatype::INT, {openPMD::Dataset::UNDEFINED_EXTENT}}); + position_comp.makeConstant(0); + + auto offset_comp = e["positionOffset"][comp_id]; + offset_comp.resetDataset( + {openPMD::Datatype::INT, {openPMD::Dataset::UNDEFINED_EXTENT}}); + offset_comp.makeConstant(0); + } + write.close(); + } + + // read + { + openPMD::Series read(filepath, openPMD::Access::READ_RANDOM_ACCESS); + REQUIRE(!read.snapshots()[0].particles.contains("e")); + } + + { + openPMD::Series read( + filepath, + openPMD::Access::READ_RANDOM_ACCESS, + R"({"verify_homogeneous_extents": false})"); + REQUIRE(read.snapshots()[0].particles.contains("e")); + auto e = read.snapshots()[0].particles["e"]; + for (auto const &record : e) + { + for (auto const &component : record.second) + { + REQUIRE( + component.second.getExtent() == + openPMD::Extent{openPMD::Dataset::UNDEFINED_EXTENT}); + } + } + } +} + +void particles_without_inconsistent_extent() +{ + // write + { + openPMD::Series write(filepath, openPMD::Access::CREATE); + auto it0 = write.writeIterations()[0]; + auto e = it0.particles["e"]; + for (auto comp_id : {"x", "y", 
"z"}) + { + auto position_comp = e["position"][comp_id]; + position_comp.resetDataset({openPMD::Datatype::INT, {5}}); + position_comp.makeConstant(0); + + auto offset_comp = e["positionOffset"][comp_id]; + offset_comp.resetDataset({openPMD::Datatype::INT, {10}}); + offset_comp.makeConstant(0); + } + write.close(); + } + + // read + { + openPMD::Series read(filepath, openPMD::Access::READ_RANDOM_ACCESS); + REQUIRE(!read.snapshots()[0].particles.contains("e")); + } + + { + openPMD::Series read( + filepath, + openPMD::Access::READ_RANDOM_ACCESS, + R"({"verify_homogeneous_extents": false})"); + REQUIRE(read.snapshots()[0].particles.contains("e")); + auto e = read.snapshots()[0].particles["e"]; + for (auto const &component : e["position"]) + { + REQUIRE(component.second.getExtent() == openPMD::Extent{5}); + } + for (auto const &component : e["positionOffset"]) + { + REQUIRE(component.second.getExtent() == openPMD::Extent{10}); + } + } +} + +void meshes_with_incomplete_extent() +{ + // write + { + openPMD::Series write(filepath, openPMD::Access::CREATE); + auto it0 = write.writeIterations()[0]; + auto E = it0.meshes["E"]; + for (auto comp_id : {"x"}) + { + auto comp = E[comp_id]; + comp.resetDataset({openPMD::Datatype::FLOAT, {5}}); + std::unique_ptr data{new float[5]}; + std::iota(data.get(), data.get() + 5, 0); + comp.storeChunk(std::move(data), {0}, {5}); + } + for (auto comp_id : {"y", "z"}) + { + auto comp = E[comp_id]; + comp.resetDataset( + {openPMD::Datatype::INT, {openPMD::Dataset::UNDEFINED_EXTENT}}); + comp.makeConstant(0); + } + write.close(); + } + + // read + { + openPMD::Series read(filepath, openPMD::Access::READ_RANDOM_ACCESS); + auto E = read.snapshots()[0].meshes["E"]; + for (auto const &component : E) + { + REQUIRE(component.second.getExtent() == openPMD::Extent{5}); + } + } +} + +void meshes_with_inconsistent_extent() +{ + // write + { + openPMD::Series write(filepath, openPMD::Access::CREATE); + auto it0 = write.writeIterations()[0]; + auto E = 
it0.meshes["E"]; + size_t i = 1; + for (auto comp_id : {"x", "y", "z"}) + { + size_t extent = i++ * 5; + auto comp = E[comp_id]; + comp.resetDataset({openPMD::Datatype::FLOAT, {extent}}); + std::unique_ptr data{new float[extent]}; + std::iota(data.get(), data.get() + extent, 0); + comp.storeChunk(std::move(data), {0}, {extent}); + } + write.close(); + } + + // read + { + openPMD::Series read(filepath, openPMD::Access::READ_RANDOM_ACCESS); + REQUIRE(!read.snapshots()[0].meshes.contains("E")); + } + + // read + { + openPMD::Series read( + filepath, + openPMD::Access::READ_RANDOM_ACCESS, + R"({"verify_homogeneous_extents": false})"); + auto E = read.snapshots()[0].meshes["E"]; + size_t i = 1; + for (auto const &component : E) + { + REQUIRE(component.second.getExtent() == openPMD::Extent{5 * i++}); + } + } +} + +void meshes_without_any_extent() +{ + // write + { + openPMD::Series write(filepath, openPMD::Access::CREATE); + auto it0 = write.writeIterations()[0]; + auto E = it0.meshes["E"]; + for (auto comp_id : {"x", "y", "z"}) + { + auto comp = E[comp_id]; + comp.resetDataset( + {openPMD::Datatype::FLOAT, + {openPMD::Dataset::UNDEFINED_EXTENT}}); + comp.makeConstant(0); + } + write.close(); + } + + // read + { + openPMD::Series read(filepath, openPMD::Access::READ_RANDOM_ACCESS); + REQUIRE(!read.snapshots()[0].meshes.contains("E")); + } + + // read + { + openPMD::Series read( + filepath, + openPMD::Access::READ_RANDOM_ACCESS, + R"({"verify_homogeneous_extents": false})"); + auto E = read.snapshots()[0].meshes["E"]; + for (auto const &component : E) + { + REQUIRE( + component.second.getExtent() == + openPMD::Extent{openPMD::Dataset::UNDEFINED_EXTENT}); + } + } +} + +auto components_without_extent() -> void +{ + particle_offset_without_extent(); + particles_without_any_extent(); + particles_without_inconsistent_extent(); + meshes_with_incomplete_extent(); + meshes_with_inconsistent_extent(); + meshes_without_any_extent(); +} +} // namespace components_without_extent diff 
--git a/test/SerialIOTest.cpp b/test/SerialIOTest.cpp index 432dd864f3..042b609220 100644 --- a/test/SerialIOTest.cpp +++ b/test/SerialIOTest.cpp @@ -768,6 +768,11 @@ TEST_CASE("issue_1744_unique_ptrs_at_close_time", "[serial]") #endif } +TEST_CASE("components_without_extent", "[serial]") +{ + components_without_extent::components_without_extent(); +} + #if openPMD_HAVE_ADIOS2 TEST_CASE("close_and_reopen_test", "[serial]") { @@ -806,7 +811,9 @@ inline void empty_dataset_test(std::string const &file_ending) } { Series series( - "../samples/empty_datasets." + file_ending, Access::READ_ONLY); + "../samples/empty_datasets." + file_ending, + Access::READ_ONLY, + R"({"verify_homogeneous_extents": false})"); REQUIRE(series.iterations.contains(1)); REQUIRE(series.iterations.count(1) == 1); @@ -2718,7 +2725,8 @@ TEST_CASE("empty_alternate_fbpic", "[serial][hdf5]") { Series s = Series( "../samples/issue-sample/empty_alternate_fbpic_%T.h5", - Access::READ_ONLY); + Access::READ_ONLY, + R"({"verify_homogeneous_extents": false})"); REQUIRE(s.iterations.contains(50)); REQUIRE(s.iterations[50].particles.contains("electrons")); REQUIRE( @@ -5856,21 +5864,21 @@ void variableBasedSeries(std::string const &file) // this tests changing extents across iterations // ADIOS2 does not support changing the dimensionality // (older versions used to somewhat support it, but not really) - auto E_y = iteration.meshes["E"]["y"]; + auto B_y = iteration.meshes["B"]["y"]; unsigned dimensionality = 3; unsigned len = i + 1; Extent changingExtent(dimensionality, len); - E_y.resetDataset({openPMD::Datatype::INT, changingExtent}); + B_y.resetDataset({openPMD::Datatype::INT, changingExtent}); std::vector changingData( std::pow(len, dimensionality), dimensionality); - E_y.storeChunk( + B_y.storeChunk( changingData, Offset(dimensionality, 0), changingExtent); // this tests datasets that are present in one iteration, but not // in others - auto E_z = iteration.meshes["E"][std::to_string(i)]; - 
E_z.resetDataset({Datatype::INT, {1}}); - E_z.makeConstant(i); + auto rho_i = iteration.meshes["rho"][std::to_string(i)]; + rho_i.resetDataset({Datatype::INT, {1}}); + rho_i.makeConstant(i); // this tests attributes that are present in one iteration, but not // in others iteration.meshes["E"].setAttribute("attr_" + std::to_string(i), i); @@ -5984,11 +5992,11 @@ void variableBasedSeries(std::string const &file) REQUIRE(chunk2.get()[i] == int(index)); } - auto E_y = iteration.meshes["E"]["y"]; + auto B_y = iteration.meshes["B"]["y"]; unsigned dimensionality = 3; unsigned len = index + 1; Extent changingExtent(dimensionality, len); - REQUIRE(E_y.getExtent() == changingExtent); + REQUIRE(B_y.getExtent() == changingExtent); last_iteration_index = index; @@ -5999,7 +6007,7 @@ void variableBasedSeries(std::string const &file) { // component is present <=> (otherIteration == i) REQUIRE( - iteration.meshes["E"].contains( + iteration.meshes["rho"].contains( std::to_string(otherIteration)) == (otherIteration == index)); REQUIRE( @@ -6008,7 +6016,7 @@ void variableBasedSeries(std::string const &file) (otherIteration <= index)); } REQUIRE( - iteration.meshes["E"][std::to_string(index)] + iteration.meshes["rho"][std::to_string(index)] .getAttribute("value") .get() == int(index)); REQUIRE( @@ -6734,7 +6742,11 @@ void extendDataset(std::string const &ext, std::string const &jsonConfig) } { - Series read(filename, Access::READ_ONLY, jsonConfig); + Series read( + filename, + Access::READ_ONLY, + json::merge( + jsonConfig, R"({"verify_homogeneous_extents": false})")); auto E_x = read.iterations[0].meshes["E"]["x"]; REQUIRE(E_x.getExtent() == Extent{10, 5}); auto chunk = E_x.loadChunk({0, 0}, {10, 5}); @@ -7062,8 +7074,11 @@ void unfinished_iteration_test( auto tryReading = [&config, file, encoding]( Access access, std::string const &additionalConfig = "{}") { + auto merged_config = json::merge( + json::merge(config, additionalConfig), + R"({"verify_homogeneous_extents": false})"); { 
- Series read(file, access, json::merge(config, additionalConfig)); + Series read(file, access, merged_config); std::vector iterations; std::cout << "\n\n\nGoing to list iterations in " << file @@ -7114,7 +7129,7 @@ void unfinished_iteration_test( if (encoding == IterationEncoding::fileBased && access == Access::READ_ONLY) { - Series read(file, access, json::merge(config, additionalConfig)); + Series read(file, access, merged_config); if (additionalConfig == "{}") { // Eager parsing, defective iteration has already been removed diff --git a/test/python/unittest/API/APITest.py b/test/python/unittest/API/APITest.py index 24dfec4051..500752b9b1 100644 --- a/test/python/unittest/API/APITest.py +++ b/test/python/unittest/API/APITest.py @@ -1953,9 +1953,9 @@ def makeIteratorRoundTrip(self, backend, file_ending): E_x.reset_dataset(DS(np.dtype("int"), extent)) E_x.store_chunk(data, [0], extent) - E_y = it.meshes["E"]["y"] - E_y.reset_dataset(DS(np.dtype("int"), [2, 2])) - span = E_y.store_chunk().current_buffer() + B_y = it.meshes["B"]["y"] + B_y.reset_dataset(DS(np.dtype("int"), [2, 2])) + span = B_y.store_chunk().current_buffer() span[0, 0] = 0 span[0, 1] = 1 span[1, 0] = 2 @@ -1978,8 +1978,8 @@ def makeIteratorRoundTrip(self, backend, file_ending): lastIterationIndex = it.iteration_index E_x = it.meshes["E"]["x"] chunk = E_x.load_chunk([0], extent) - E_y = it.meshes["E"]["y"] - chunk2 = E_y.load_chunk([0, 0], [2, 2]) + B_y = it.meshes["B"]["y"] + chunk2 = B_y.load_chunk([0, 0], [2, 2]) it.close() for i in range(len(data)):