diff --git a/.github/workflows/github-actions-ci.yml b/.github/workflows/github-actions-ci.yml index 372c67b8..20c72982 100644 --- a/.github/workflows/github-actions-ci.yml +++ b/.github/workflows/github-actions-ci.yml @@ -20,7 +20,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.11"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14-dev"] steps: - uses: actions/checkout@v3 @@ -28,13 +28,14 @@ jobs: uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - name: Install dependencies + - name: Install system packages run: | sudo apt-get update - sudo apt-get install -y hdf5-tools curl + sudo apt-get install -y hdf5-tools curl libhdf5-dev + - name: Install pip dependencies + run: | python -m pip install --upgrade pip pip install -r requirements.txt - pip install allensdk - name: Run tests run: | pip install -r requirements-test.txt @@ -51,7 +52,7 @@ jobs: runs-on: ["self-hosted"] strategy: matrix: - pyver: ["3.9" , "3.11"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14-rc"] steps: - uses: actions/checkout@v4 - name: Build docker image diff --git a/.github/workflows/nightly-onprem.yml b/.github/workflows/nightly-onprem.yml index 1f8680d0..21dd6814 100644 --- a/.github/workflows/nightly-onprem.yml +++ b/.github/workflows/nightly-onprem.yml @@ -10,7 +10,7 @@ jobs: runs-on: ["self-hosted"] strategy: matrix: - pyver: ["3.9" , "3.11"] + pyver: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14-rc"] branch: ["master", "feature/**"] steps: - uses: actions/checkout@v4 diff --git a/docker/Dockerfile b/docker/Dockerfile index ec19e870..9b856d3b 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -9,6 +9,7 @@ RUN apt-get update \ && apt-get install -y \ hdf5-tools \ curl \ + libhdf5-dev \ git-lfs \ && rm -rf /var/lib/apt/lists/* diff --git a/ipfx/attach_metadata/sink/nwb2_sink.py b/ipfx/attach_metadata/sink/nwb2_sink.py index 2f8f7c07..7ffe4a44 100644 --- a/ipfx/attach_metadata/sink/nwb2_sink.py 
+++ b/ipfx/attach_metadata/sink/nwb2_sink.py @@ -115,8 +115,8 @@ def _get_single_ic_electrode(self) -> pynwb.icephys.IntracellularElectrode: """ - keys = list(self.nwbfile.ic_electrodes.keys()) - + keys = list(self.nwbfile.icephys_electrodes.keys()) + if len(keys) != 1: raise ValueError( "expected exactly 1 intracellular electrode, found " diff --git a/ipfx/bin/generate_fx_input.py b/ipfx/bin/generate_fx_input.py index 332f092f..7e9fe5b8 100755 --- a/ipfx/bin/generate_fx_input.py +++ b/ipfx/bin/generate_fx_input.py @@ -1,5 +1,5 @@ import os -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import ipfx.sweep_props as sp from ipfx.bin.run_sweep_extraction import run_sweep_extraction from ipfx.bin.generate_qc_input import generate_qc_input diff --git a/ipfx/bin/generate_pipeline_input.py b/ipfx/bin/generate_pipeline_input.py index 07c0a1c1..c001f984 100755 --- a/ipfx/bin/generate_pipeline_input.py +++ b/ipfx/bin/generate_pipeline_input.py @@ -1,5 +1,5 @@ import ipfx.qc_feature_evaluator as qcp -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import os.path from ipfx.bin.generate_se_input import generate_se_input, parse_args import ipfx.lims_queries as lq diff --git a/ipfx/bin/generate_qc_input.py b/ipfx/bin/generate_qc_input.py index b35aff91..affb0044 100755 --- a/ipfx/bin/generate_qc_input.py +++ b/ipfx/bin/generate_qc_input.py @@ -2,7 +2,7 @@ from ipfx.bin.run_sweep_extraction import run_sweep_extraction from ipfx.bin.generate_se_input import generate_se_input, parse_args import ipfx.sweep_props as sp -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import ipfx.logging_utils as lu diff --git a/ipfx/bin/generate_se_input.py b/ipfx/bin/generate_se_input.py index 7c644a09..a4c6fde3 100755 --- a/ipfx/bin/generate_se_input.py +++ b/ipfx/bin/generate_se_input.py @@ -1,4 +1,4 @@ -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import os import ipfx.lims_queries 
as lq import argparse diff --git a/ipfx/bin/make_stimulus_ontology.py b/ipfx/bin/make_stimulus_ontology.py index 34940242..8f3c734b 100755 --- a/ipfx/bin/make_stimulus_ontology.py +++ b/ipfx/bin/make_stimulus_ontology.py @@ -1,4 +1,4 @@ -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju from ipfx.stimulus import StimulusOntology import re import ipfx.lims_queries as lq @@ -84,5 +84,5 @@ def main(): make_default_stimulus_ontology() -if __name__== "__main__": +if __name__== "__main__": main() diff --git a/ipfx/bin/pipeline_from_specimen_id.py b/ipfx/bin/pipeline_from_specimen_id.py index 73ed6646..983f077d 100755 --- a/ipfx/bin/pipeline_from_specimen_id.py +++ b/ipfx/bin/pipeline_from_specimen_id.py @@ -1,4 +1,4 @@ -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import sys import os.path from .run_pipeline import run_pipeline @@ -34,19 +34,21 @@ def main(): pipe_input = gpi.generate_pipeline_input(cell_dir, specimen_id=int(specimen_id)) - input_json = os.path.join(cell_dir,INPUT_JSON) - ju.write(input_json,pipe_input) + input_json = os.path.join(cell_dir, INPUT_JSON) + ju.write(input_json, pipe_input) # reading back from disk pipe_input = ju.read(input_json) - pipe_output = run_pipeline(pipe_input["input_nwb_file"], - pipe_input["output_nwb_file"], - pipe_input.get("stimulus_ontology_file", None), - pipe_input.get("qc_fig_dir",None), - pipe_input["qc_criteria"], - pipe_input["manual_sweep_states"]) - - ju.write(os.path.join(cell_dir,OUTPUT_JSON), pipe_output) + pipe_output = run_pipeline( + pipe_input["input_nwb_file"], + pipe_input["output_nwb_file"], + pipe_input.get("stimulus_ontology_file", None), + pipe_input.get("qc_fig_dir",None), + pipe_input["qc_criteria"], + pipe_input["manual_sweep_states"], + ) + + ju.write(os.path.join(cell_dir, OUTPUT_JSON), pipe_output) if __name__ == "__main__": main() diff --git a/ipfx/bin/plot_ephys_nwb.py b/ipfx/bin/plot_ephys_nwb.py index 0a816c36..1076bef8 100755 --- 
a/ipfx/bin/plot_ephys_nwb.py +++ b/ipfx/bin/plot_ephys_nwb.py @@ -6,7 +6,7 @@ from ipfx.qc_feature_extractor import sweep_qc_features from ipfx.utilities import drop_failed_sweeps from ipfx.stimulus import StimulusOntology -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju from typing import ( Optional, List, Dict, Tuple, Collection, Sequence, Union ) @@ -27,11 +27,11 @@ def plot_data_set(data_set, data_set.sweep_info = sweep_qc_features(data_set) sweep_table = data_set.filtered_sweep_table(clamp_mode=clamp_mode, stimuli=stimuli, stimuli_exclude=stimuli_exclude) - + if len(sweep_table)==0: warnings.warn("No sweeps to plot") return - + height_ratios, width_ratios = axes_ratios(sweep_table) fig, ax = plt.subplots(len(height_ratios), 3, @@ -51,7 +51,7 @@ def plot_data_set(data_set, annot = sweep_numbers.astype(str) if show_amps: annot += sweep_set_table['stimulus_amplitude'].apply(": {:.3g} pA".format) - + ax_a = ax[fig_row,0] ax_i = ax[fig_row,1] diff --git a/ipfx/bin/run_chirp_fv_extraction.py b/ipfx/bin/run_chirp_fv_extraction.py old mode 100644 new mode 100755 index b6d38253..b55eb0e6 --- a/ipfx/bin/run_chirp_fv_extraction.py +++ b/ipfx/bin/run_chirp_fv_extraction.py @@ -4,7 +4,7 @@ import traceback from multiprocessing import Pool from functools import partial -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju from ipfx.stimulus import StimulusOntology import ipfx.script_utils as su @@ -32,9 +32,9 @@ class CollectChirpFeatureVectorParameters(ags.ArgSchema): ], cli_as_single_argument=True) data_source = ags.fields.String( - description="Source of NWB files ('sdk' or 'lims')", - default="sdk", - validate=lambda x: x in ["sdk", "lims"] + description="Source of NWB files ('lims' is only currently implemented option)", + default="lims", + validate=lambda x: x in ["lims"] ) @@ -142,4 +142,4 @@ def main(output_dir, output_code, input_file, include_failed_cells, if __name__ == "__main__": module = 
ags.ArgSchemaParser(schema_type=CollectChirpFeatureVectorParameters) - main(**module.args) \ No newline at end of file + main(**module.args) diff --git a/ipfx/bin/run_feature_collection.py b/ipfx/bin/run_feature_collection.py old mode 100644 new mode 100755 index a68ca2b9..ec265e0c --- a/ipfx/bin/run_feature_collection.py +++ b/ipfx/bin/run_feature_collection.py @@ -6,7 +6,7 @@ import ipfx.feature_vectors as fv import ipfx.script_utils as su from ipfx.stimulus import StimulusOntology -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import logging from multiprocessing import Pool from functools import partial @@ -20,9 +20,9 @@ class CollectFeatureParameters(ags.ArgSchema): include_failed_cells = ags.fields.Boolean(default=False) run_parallel = ags.fields.Boolean(default=True) data_source = ags.fields.String( - description="Source of NWB files ('sdk' or 'lims' or 'filesystem')", - default="sdk", - validate=lambda x: x in ["sdk", "lims", "filesystem"] + description="Source of NWB files ('lims' or 'filesystem')", + default="lims", + validate=lambda x: x in ["lims", "filesystem"] ) @@ -97,7 +97,7 @@ def extract_features(data_set, ramp_sweep_numbers, ssq_sweep_numbers, lsq_sweep_ mask_supra = sweep_table["stim_amp"] >= basic_lsq_features["rheobase_i"] sweep_indexes = fv._consolidated_long_square_indexes(sweep_table.loc[mask_supra, :]) amps = np.rint(sweep_table.loc[sweep_indexes, "stim_amp"].values - basic_lsq_features["rheobase_i"]) - spike_data = np.array(basic_lsq_features["spikes_set"]) + spike_data = np.array(basic_lsq_features["spikes_set"], dtype=object) for amp, swp_ind in zip(amps, sweep_indexes): if (amp % amp_interval != 0) or (amp > max_above_rheo) or (amp < 0): diff --git a/ipfx/bin/run_feature_extraction.py b/ipfx/bin/run_feature_extraction.py index 711cc43c..b32ffbf1 100755 --- a/ipfx/bin/run_feature_extraction.py +++ b/ipfx/bin/run_feature_extraction.py @@ -7,7 +7,7 @@ from ipfx._schemas import FeatureExtractionParameters from 
ipfx.dataset.create import create_ephys_data_set import ipfx.sweep_props as sp -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju from ipfx.nwb_append import append_spike_times import ipfx.plot_qc_figures as plotqc diff --git a/ipfx/bin/run_feature_vector_extraction.py b/ipfx/bin/run_feature_vector_extraction.py old mode 100644 new mode 100755 index 306ba193..af967650 --- a/ipfx/bin/run_feature_vector_extraction.py +++ b/ipfx/bin/run_feature_vector_extraction.py @@ -7,7 +7,7 @@ import os import h5py from ipfx.stimulus import StimulusOntology -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import ipfx.feature_vectors as fv import ipfx.lims_queries as lq import ipfx.script_utils as su @@ -26,9 +26,9 @@ class CollectFeatureVectorParameters(ags.ArgSchema): allow_none=True ) data_source = ags.fields.String( - description="Source of NWB files ('sdk' or 'lims' or 'filesystem')", - default="sdk", - validate=lambda x: x in ["sdk", "lims", "filesystem"] + description="Source of NWB files ('lims' or 'filesystem')", + default="lims", + validate=lambda x: x in ["lims", "filesystem"] ) output_code = ags.fields.String( description="Code used for naming of output files", diff --git a/ipfx/bin/run_pipeline.py b/ipfx/bin/run_pipeline.py index 0b46640e..febf28c3 100755 --- a/ipfx/bin/run_pipeline.py +++ b/ipfx/bin/run_pipeline.py @@ -1,8 +1,7 @@ import logging import argschema as ags -import allensdk.core.json_utilities as json_utilities - +import ipfx.json_utilities as json_utilities import ipfx.sweep_props as sweep_props from ipfx.logging_utils import log_pretty_header from ipfx._schemas import PipelineParameters diff --git a/ipfx/bin/run_pipeline_from_nwb_file.py b/ipfx/bin/run_pipeline_from_nwb_file.py index 5484cec8..1e4b529d 100755 --- a/ipfx/bin/run_pipeline_from_nwb_file.py +++ b/ipfx/bin/run_pipeline_from_nwb_file.py @@ -1,4 +1,4 @@ -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import 
os.path from ipfx.bin.run_pipeline import run_pipeline from ipfx.bin.generate_pipeline_input import generate_pipeline_input @@ -37,7 +37,7 @@ def main(): ) ) parser.add_argument( - "--output_json", type=str, default="output.json", + "--output_json", type=str, default="output.json", help=( "write output json file here (relative to OUTPUT_DIR/cell_name, " "where cell_name is the extensionless basename of the input NWB " @@ -49,7 +49,7 @@ def main(): help=( "Generate qc figures and store them here (relative to " "OUTPUT_DIR/cell_name, where cell_name is the extensionless " - "basename of the input nwb file). If you supply --qc_fig_dir with " + "basename of the input nwb file). If you supply --qc_fig_dir with " "no arguments, the path will be OUTPUT_DIR/cell_name/qc_figs. If " "this argument is not supplied, no figures will be generated." ) diff --git a/ipfx/bin/run_qc.py b/ipfx/bin/run_qc.py index b0f0d120..626a97a1 100755 --- a/ipfx/bin/run_qc.py +++ b/ipfx/bin/run_qc.py @@ -4,7 +4,7 @@ import ipfx.qc_feature_evaluator as qcp import argschema as ags from ipfx._schemas import QcParameters -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import ipfx.sweep_props as sp import pandas as pd import ipfx.logging_utils as lu diff --git a/ipfx/bin/run_sweep_extraction.py b/ipfx/bin/run_sweep_extraction.py index d8e9b088..35e8a379 100755 --- a/ipfx/bin/run_sweep_extraction.py +++ b/ipfx/bin/run_sweep_extraction.py @@ -1,7 +1,7 @@ import logging -import allensdk.core.json_utilities as json_utilities import argschema as ags +import ipfx.json_utilities as json_utilities from ipfx._schemas import SweepExtractionParameters from ipfx.dataset.create import create_ephys_data_set diff --git a/ipfx/bin/validate_experiment.py b/ipfx/bin/validate_experiment.py index 11f3bc98..2c227b70 100755 --- a/ipfx/bin/validate_experiment.py +++ b/ipfx/bin/validate_experiment.py @@ -1,8 +1,8 @@ -import allensdk.core.json_utilities as ju -import numpy as np import sys import 
os import logging +import numpy as np +import ipfx.json_utilities as ju def nullisclose(a, b): diff --git a/ipfx/data_set_utils.py b/ipfx/data_set_utils.py old mode 100644 new mode 100755 index c9828de2..64ae5aff --- a/ipfx/data_set_utils.py +++ b/ipfx/data_set_utils.py @@ -1,7 +1,7 @@ """A shim for backwards compatible imports of create_data_set """ -from allensdk.deprecated import deprecated +from ipfx.deprecated import deprecated from ipfx.dataset.create import create_ephys_data_set create_data_set = deprecated( # type: ignore diff --git a/ipfx/dataset/create.py b/ipfx/dataset/create.py old mode 100644 new mode 100755 index 89295bc6..0dc8c468 --- a/ipfx/dataset/create.py +++ b/ipfx/dataset/create.py @@ -5,7 +5,7 @@ import h5py import numpy as np -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju from ipfx.dataset.ephys_data_set import EphysDataSet from ipfx.stimulus import StimulusOntology @@ -17,7 +17,7 @@ def get_scalar_value(dataset_from_nwb): """ - Some values in NWB are stored as scalar whereas others as np.ndarrays with + Some values in NWB are stored as scalar whereas others as np.ndarrays with dimension 1. Use this function to retrieve the scalar value itself. 
""" @@ -67,7 +67,7 @@ def get_nwb_version(nwb_file: str) -> Dict[str, Any]: nwb_version_str = to_str(nwb_version) if nwb_version is not None and re.match("^NWB-", nwb_version_str): return { - "major": int(nwb_version_str[4]), + "major": int(nwb_version_str[4]), "full": nwb_version_str } diff --git a/ipfx/dataset/ephys_data_set.py b/ipfx/dataset/ephys_data_set.py old mode 100644 new mode 100755 index 953ef259..14f68167 --- a/ipfx/dataset/ephys_data_set.py +++ b/ipfx/dataset/ephys_data_set.py @@ -8,8 +8,7 @@ import pandas as pd import numpy as np -from allensdk.deprecated import deprecated - +from ipfx.deprecated import deprecated from ipfx.dataset.ephys_data_interface import EphysDataInterface from ipfx.stimulus import StimulusOntology from ipfx.sweep import Sweep, SweepSet @@ -37,15 +36,15 @@ class EphysDataSet(object): @property def ontology(self) -> StimulusOntology: - """The stimulus ontology maps codified description of the stimulus type + """The stimulus ontology maps codified description of the stimulus type to the human-readable descriptions. """ return self._data.ontology @property def sweep_table(self) -> pd.DataFrame: - """Each row of the sweep table contains the metadata for a single - sweep. In particular details of the stimulus presented and the clamp + """Each row of the sweep table contains the metadata for a single + sweep. In particular details of the stimulus presented and the clamp mode. See EphysDataInterface.get_sweep_metadata for more information. """ @@ -76,7 +75,7 @@ def sweep_info(self, value): self._sweep_info[sweep["sweep_number"]] = sweep else: self._sweep_info = value - + if hasattr(self, "_sweep_table"): del self._sweep_table @@ -85,21 +84,21 @@ def __init__( data: EphysDataInterface, sweep_info: Optional[List[Dict]] = None ): - """EphysDataSet is the preferred interface for running analyses or + """EphysDataSet is the preferred interface for running analyses or pipeline code. 
Parameters ---------- - data : This object must implement the EphysDataInterface. It will - handle any loading of data from external sources (such as NWB2 + data : This object must implement the EphysDataInterface. It will + handle any loading of data from external sources (such as NWB2 files) """ self._data: EphysDataInterface = data self.sweep_info = sweep_info or [] def _setup_stimulus_repeat_lookup(self): - """Each sweep contains the ith repetition of some stimulus (from 1 -> - the number of times that stimulus was presented). Find i for each + """Each sweep contains the ith repetition of some stimulus (from 1 -> + the number of times that stimulus was presented). Find i for each sweep. Notes @@ -142,16 +141,16 @@ def filtered_sweep_table( if stimuli: mask = st[self.STIMULUS_CODE].apply( - self.ontology.stimulus_has_any_tags, - args=(stimuli,), + self.ontology.stimulus_has_any_tags, + args=(stimuli,), tag_type="code" ) st = st[mask.astype(bool)] if stimuli_exclude: mask = ~st[self.STIMULUS_CODE].apply( - self.ontology.stimulus_has_any_tags, - args=(stimuli_exclude,), + self.ontology.stimulus_has_any_tags, + args=(stimuli_exclude,), tag_type="code" ) st = st[mask.astype(bool)] @@ -192,7 +191,7 @@ def get_sweep_number( stimuli: Collection[str], clamp_mode: Optional[str] = None ) -> int: - """Convenience for getting the integer identifier of the temporally + """Convenience for getting the integer identifier of the temporally latest sweep matching argued criteria. 
Parameters @@ -208,7 +207,7 @@ def get_sweep_number( def sweep(self, sweep_number: int) -> Sweep: """ - Create an instance of the Sweep class with the data loaded from the + Create an instance of the Sweep class with the data loaded from the from a file Parameters @@ -229,8 +228,8 @@ def sweep(self, sweep_number: int) -> Sweep: voltage, current = type(self)._voltage_current( sweep_data["stimulus"], - sweep_data["response"], - sweep_metadata["clamp_mode"], + sweep_data["response"], + sweep_metadata["clamp_mode"], enforce_equal_length=True, ) @@ -252,15 +251,15 @@ def sweep(self, sweep_number: int) -> Sweep: return sweep def sweep_set( - self, + self, sweep_numbers: Union[Sequence[int], int, None] = None ) -> SweepSet: - """Construct a SweepSet object, which offers convenient access to an + """Construct a SweepSet object, which offers convenient access to an ordered collection of sweeps. Parameters ---------- - sweep_numbers : Identifiers for the sweeps which will make up this set. + sweep_numbers : Identifiers for the sweeps which will make up this set. If None, use all available sweeps. Returns @@ -312,12 +311,12 @@ def get_sweep_data(self, sweep_number: int) -> Dict: return sweep_data def get_clamp_mode(self, sweep_number: int) -> str: - """Obtain the clamp mode of a given sweep. Should be one of + """Obtain the clamp mode of a given sweep. 
Should be one of EphysDataSet.VOLTAGE_CLAMP or EphysDataSet.CURRENT_CLAMP Parameters ---------- - sweep_number : identifier for the sweep whose clamp mode will be + sweep_number : identifier for the sweep whose clamp mode will be returned Returns @@ -331,7 +330,7 @@ def get_stimulus_code(self, sweep_number: int) -> str: Parameters ---------- - sweep_number : identifier for the sweep whose stimulus code will be + sweep_number : identifier for the sweep whose stimulus code will be returned Returns @@ -341,14 +340,14 @@ def get_stimulus_code(self, sweep_number: int) -> str: return self._data.get_stimulus_code(sweep_number) def get_stimulus_code_ext(self, sweep_number: int) -> str: - """Obtain the extended stimulus code for a sweep. This is the stimulus - code for that sweep augmented with an integer counter describing the - number of presentations of that stimulus up to and including the + """Obtain the extended stimulus code for a sweep. This is the stimulus + code for that sweep augmented with an integer counter describing the + number of presentations of that stimulus up to and including the requested sweep. 
Parameters ---------- - sweep_number : identifies the sweep whose extended stimulus code will + sweep_number : identifies the sweep whose extended stimulus code will be returned Returns @@ -367,7 +366,7 @@ def get_stimulus_units(self, sweep_number: int) -> str: Parameters ---------- - sweep_number : identifies the sweep whose stimulus unit will be + sweep_number : identifies the sweep whose stimulus unit will be returned Returns @@ -380,11 +379,11 @@ def get_stimulus_units(self, sweep_number: int) -> str: def _voltage_current( cls, stimulus: np.ndarray, - response: np.ndarray, + response: np.ndarray, clamp_mode: str, enforce_equal_length: bool = True ) -> Tuple[np.array, np.array]: - """Resolve the stimulus and response arrays from a sweep's data into + """Resolve the stimulus and response arrays from a sweep's data into voltage and current, using the clamp mode as a guide Parameters @@ -392,14 +391,14 @@ def _voltage_current( stimulus : stimulus trace response : response trace clamp_mode : Used to map stimulus and response to voltage and current - enforce_equal_length : Raise a ValueError if the stimulus and + enforce_equal_length : Raise a ValueError if the stimulus and response arrays have uneven numbers of samples Returns ------- The voltage and current traces. 
- """ + """ if clamp_mode == cls.VOLTAGE_CLAMP: voltage = stimulus @@ -419,10 +418,10 @@ def _voltage_current( return voltage, current def _nan_trailing_zeros( - array: np.ndarray, + array: np.ndarray, inplace: bool = False ) -> np.ndarray: - """If an array ends with one or more zeros, replace those zeros with + """If an array ends with one or more zeros, replace those zeros with np.nan """ @@ -434,4 +433,4 @@ def _nan_trailing_zeros( return array array[nonzero[-1] + 1:] = np.nan - return array \ No newline at end of file + return array diff --git a/ipfx/deprecated.py b/ipfx/deprecated.py new file mode 100755 index 00000000..4a39016e --- /dev/null +++ b/ipfx/deprecated.py @@ -0,0 +1,106 @@ +# Allen Institute Software License - This software license is the 2-clause BSD +# license plus a third clause that prohibits redistribution for commercial +# purposes without further permission. +# +# Copyright 2017. Allen Institute. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Redistributions for commercial purposes are not permitted without the +# Allen Institute's written permission. +# For purposes of this license, commercial purposes is the incorporation of the +# Allen Institute's software into anything for which you will charge fees or +# other compensation. Contact terms@alleninstitute.org for commercial licensing +# opportunities. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +import copy +import warnings +import functools + +try: + from numpy import VisibleDeprecationWarning +except (ModuleNotFoundError, ImportError): + from numpy.exceptions import VisibleDeprecationWarning + +def deprecated(message=None): + + if message is None: + message = '' + + def output_decorator(fn): + + @functools.wraps(fn) + def wrapper(*args, **kwargs): + + warnings.warn("Function {0} is deprecated. {1}".format( + fn.__name__, message), + category=VisibleDeprecationWarning, stacklevel=2) + + return fn(*args, **kwargs) + + return wrapper + + return output_decorator + + +def class_deprecated(message=None): + + if message is None: + message = '' + + def output_class_decorator(cls): + + fn_copy = copy.deepcopy(cls.__init__) + + @functools.wraps(cls.__init__) + def wrapper(*args, **kwargs): + warnings.warn("Class {0} is deprecated. 
{1}".format( + cls.__name__, message), + category=VisibleDeprecationWarning, stacklevel=2) + fn_copy(*args, **kwargs) + + cls.__init__ = wrapper + return cls + + return output_class_decorator + + +def legacy(message=None): + + if message is None: + message = '' + + def output_decorator(fn): + + @functools.wraps(fn) + def wrapper(*args, **kwargs): + + warnings.warn("Function {0} is provided for backward-compatibilty with a legacy API, and may be removed in the future. {1}".format( + fn.__name__, message), + category=VisibleDeprecationWarning, stacklevel=2) + + return fn(*args, **kwargs) + + return wrapper + + return output_decorator diff --git a/ipfx/ephys_data_set.py b/ipfx/ephys_data_set.py old mode 100644 new mode 100755 index 0d55339b..c829789e --- a/ipfx/ephys_data_set.py +++ b/ipfx/ephys_data_set.py @@ -1,7 +1,7 @@ """A shim for backwards compatible imports of EphysDataSet """ -from allensdk.deprecated import class_deprecated +from ipfx.deprecated import class_deprecated from ipfx.dataset.ephys_data_set import EphysDataSet EphysDataSet = class_deprecated( # type: ignore diff --git a/ipfx/json_utilities.py b/ipfx/json_utilities.py new file mode 100755 index 00000000..e56761e5 --- /dev/null +++ b/ipfx/json_utilities.py @@ -0,0 +1,62 @@ +import logging +import numpy as np +import simplejson as json + +ju_logger = logging.getLogger(__name__) + + +def read(file_name): + """Shortcut reading JSON from a file.""" + with open(file_name, "rb") as f: + json_string = f.read().decode("utf-8") + if len(json_string) == 0: # If empty file + # Create a string that will give an empty JSON object instead of an + # error + json_string = "{}" + json_obj = json.loads(json_string) + + return json_obj + + +def write(file_name, obj): + """Shortcut for writing JSON to a file. 
This also takes care of + serializing numpy and data types.""" + with open(file_name, "wb") as f: + try: + f.write(write_string(obj)) + except TypeError: + f.write(bytes(write_string(obj), "utf-8")) + + +def write_string(obj): + """Shortcut for writing JSON to a string. This also takes care of + serializing numpy and data types.""" + return json.dumps( + obj, + indent=2, + ignore_nan=True, + default=json_handler, + iterable_as_array=True, + ) + + +def json_handler(obj): + """Used by write_string convert a few non-standard types to things that the + json package can handle.""" + if hasattr(obj, "to_dict"): + return obj.to_dict() + elif isinstance(obj, np.ndarray): + return obj.tolist() + elif isinstance(obj, np.floating): + return float(obj) + elif isinstance(obj, np.integer): + return int(obj) + elif isinstance(obj, bool) or isinstance(obj, np.bool_): + return bool(obj) + elif hasattr(obj, "isoformat"): + return obj.isoformat() + else: + raise TypeError( + "Object of type %s with value of %s is not JSON serializable" + % (type(obj), repr(obj)) + ) diff --git a/ipfx/lab_notebook_reader.py b/ipfx/lab_notebook_reader.py old mode 100644 new mode 100755 index 390bd707..0d81b7b6 --- a/ipfx/lab_notebook_reader.py +++ b/ipfx/lab_notebook_reader.py @@ -1,7 +1,7 @@ """A shim for backwards compatible imports of lab_notebook_reader """ -from allensdk.deprecated import class_deprecated +from ipfx.deprecated import class_deprecated from ipfx.dataset.labnotebook import LabNotebookReader LabNotebookReader = class_deprecated( # type: ignore diff --git a/ipfx/lims_queries.py b/ipfx/lims_queries.py index a2aec607..eee6f01e 100755 --- a/ipfx/lims_queries.py +++ b/ipfx/lims_queries.py @@ -2,9 +2,6 @@ import logging import pg8000 -from allensdk.core.authentication import credential_injector -from allensdk.core.auth_config import LIMS_DB_CREDENTIAL_MAP - from ipfx.string_utils import to_str @@ -19,15 +16,34 @@ TIMEOUT = float(TIMEOUT) # type: ignore 
-@credential_injector(LIMS_DB_CREDENTIAL_MAP) -def _connect(user, host, dbname, password, port, timeout=TIMEOUT): +LIMS_DB_CREDENTIAL_MAP = { + "dbname": "LIMS_DBNAME", + "user": "LIMS_USER", + "host": "LIMS_HOST", + "password": "LIMS_PASSWORD", + "port": "LIMS_PORT" +} + + +LIMS_DB_CREDENTIAL_DEFAULTS = { + "LIMS_DBNAME": None, + "LIMS_USER": None, + "LIMS_HOST": None, + "LIMS_PORT": 5432, +} + + +def _connect(timeout=TIMEOUT): + # Get credentials from environment variables + credentials = dict((k, os.environ.get(env_var, LIMS_DB_CREDENTIAL_DEFAULTS.get(env_var))) + for k, env_var in LIMS_DB_CREDENTIAL_MAP.items()) conn = pg8000.connect( - user=user, - host=host, - database=dbname, - password=password, - port=int(port), + user=credentials["user"], + host=credentials["host"], + database=credentials["dbname"], + password=credentials["password"], + port=int(credentials["port"]), timeout=timeout ) return conn, conn.cursor() @@ -42,7 +58,7 @@ def able_to_connect_to_lims(): except pg8000.Error: # the connection failed return False - except TypeError: + except (TypeError, KeyError): # a credential was missing return False diff --git a/ipfx/plot_qc_figures.py b/ipfx/plot_qc_figures.py old mode 100644 new mode 100755 index da361bd1..e7b295c3 --- a/ipfx/plot_qc_figures.py +++ b/ipfx/plot_qc_figures.py @@ -11,7 +11,7 @@ import datetime import matplotlib.pyplot as plt import glob -from allensdk.config.manifest import Manifest +from pathlib import Path import matplotlib matplotlib.use('agg') @@ -743,6 +743,57 @@ def exp_curve(x, a, inv_tau, y0): return y0 + a * np.exp(-inv_tau * x) +def safe_mkdir(directory): + '''Create path if not already there. 
+ + Parameters + ---------- + directory : string + create it if it doesn't exist + + Returns + ------- + leftmost : string + most rootward directory created + + ''' + + parts = Path(directory).parts + sub_paths = [Path(parts[0])] + for part in parts[1:]: + sub_paths.append(sub_paths[-1] / part) + + leftmost = None + for sub_path in sub_paths: + if not sub_path.exists(): + leftmost = str(sub_path) + + try: + os.makedirs(directory) + except OSError as e: + if ((sys.platform == "darwin") and (e.errno == errno.EISDIR) and \ + (e.filename == "/")): + # undocumented behavior of mkdir on OSX where for / it raises + # EISDIR and not EEXIST + # https://bugs.python.org/issue24231 (old but still holds true) + pass + elif sys.platform == "win32" and e.errno == errno.EACCES: + root_path = os.path.abspath(os.sep) + if e.filename == root_path or \ + e.filename == root_path.replace("\\", "/"): + # When attempting to os.makedirs the root drive letter on + # Windows, EACCES is raised, not EEXIST + pass + else: + raise + elif e.errno == errno.EEXIST: + pass + else: + raise + + return leftmost + + def display_features(qc_fig_dir, data_set, feature_data): """ @@ -763,8 +814,8 @@ def display_features(qc_fig_dir, data_set, feature_data): shutil.rmtree(qc_fig_dir) image_dir = os.path.join(qc_fig_dir,"img") - Manifest.safe_mkdir(qc_fig_dir) - Manifest.safe_mkdir(image_dir) + safe_mkdir(qc_fig_dir) + safe_mkdir(image_dir) logging.info("Saving figures") make_sweep_page(data_set, qc_fig_dir) diff --git a/ipfx/script_utils.py b/ipfx/script_utils.py old mode 100644 new mode 100755 index f7ec36e5..042433e7 --- a/ipfx/script_utils.py +++ b/ipfx/script_utils.py @@ -7,8 +7,6 @@ import pandas as pd import h5py -from allensdk.core.cell_types_cache import CellTypesCache - import ipfx.lims_queries as lq import ipfx.stim_features as stf import ipfx.stimulus_protocol_analysis as spa @@ -48,13 +46,6 @@ def lims_nwb_information(specimen_id): return nwb_path, h5_path -def sdk_nwb_information(specimen_id): - 
ctc = CellTypesCache() - nwb_data_set = ctc.get_ephys_data(specimen_id) - sweep_info = ctc.get_ephys_sweeps(specimen_id) - return nwb_data_set.file_name, sweep_info - - def dataset_for_specimen_id(specimen_id, data_source, ontology, file_list=None): if data_source == "lims": nwb_path, h5_path = lims_nwb_information(specimen_id) @@ -69,15 +60,6 @@ def dataset_for_specimen_id(specimen_id, data_source, ontology, file_list=None): logging.warning("Exception when loading specimen {:d} from LIMS".format(specimen_id)) logging.warning(detail) return {"error": {"type": "dataset", "details": traceback.format_exc(limit=None)}} - elif data_source == "sdk": - nwb_path, sweep_info = sdk_nwb_information(specimen_id) - try: - data_set = create_ephys_data_set( - nwb_file=nwb_path, sweep_info=sweep_info, ontology=ontology) - except Exception as detail: - logging.warning("Exception when loading specimen {:d} via Allen SDK".format(specimen_id)) - logging.warning(detail) - return {"error": {"type": "dataset", "details": traceback.format_exc(limit=None)}} elif data_source == "filesystem": nwb_path = file_list[specimen_id] try: diff --git a/ipfx/stimulus.py b/ipfx/stimulus.py old mode 100644 new mode 100755 index 64c2f6ae..562c2695 --- a/ipfx/stimulus.py +++ b/ipfx/stimulus.py @@ -2,7 +2,7 @@ import logging import warnings -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju from enum import Enum @@ -92,7 +92,7 @@ def has_tag(self, tag, tag_type=None): class StimulusOntology(object): DEFAULT_STIMULUS_ONTOLOGY_FILE = os.path.join( - os.path.dirname(__file__), + os.path.dirname(__file__), "defaults", "stimulus_ontology.json" ) diff --git a/ipfx/utilities.py b/ipfx/utilities.py index e9718689..69a5cf11 100644 --- a/ipfx/utilities.py +++ b/ipfx/utilities.py @@ -6,6 +6,8 @@ from ipfx.stimulus import StimulusOntology from ipfx.dataset.ephys_data_set import EphysDataSet +from pynwb.icephys import SweepTable +import pynwb def drop_failed_sweeps( dataset: EphysDataSet, @@ 
-41,3 +43,12 @@ def drop_failed_sweeps(
 
     dataset.sweep_info = sweep_features
 
+def inject_sweep_table(nwbfile: pynwb.NWBFile):
+    """
+    Allows us to keep using the SweepTable which can not be constructed anymore in pynwb 3.0.
+    """
+
+    sweep_table = SweepTable.__new__(SweepTable, parent=nwbfile, in_construct_mode=True)
+    sweep_table.__init__(name='sweep_table')
+    sweep_table._in_construct_mode = False
+    nwbfile.sweep_table = sweep_table
diff --git a/ipfx/version.txt b/ipfx/version.txt
old mode 100644
new mode 100755
index 359a5b95..7ec1d6db
--- a/ipfx/version.txt
+++ b/ipfx/version.txt
@@ -1 +1 @@
-2.0.0
\ No newline at end of file
+2.1.0
diff --git a/ipfx/x_to_nwb/ABFConverter.py b/ipfx/x_to_nwb/ABFConverter.py
index b84a23cc..89238506 100644
--- a/ipfx/x_to_nwb/ABFConverter.py
+++ b/ipfx/x_to_nwb/ABFConverter.py
@@ -21,6 +21,7 @@
 from ipfx.x_to_nwb.conversion_utils import PLACEHOLDER, V_CLAMP_MODE, I_CLAMP_MODE, I0_CLAMP_MODE, \
     parseUnit, getStimulusSeriesClass, getAcquiredSeriesClass, createSeriesName, convertDataset, \
     getPackageInfo, createCycleID
+from ipfx.utilities import inject_sweep_table
 
 log = logging.getLogger(__name__)
 
@@ -76,7 +77,9 @@ def __init__(self, inFileOrFolder, outFile, outputFeedbackChannel, compression=T
         nwbFile.add_device(device)
 
         electrodes = self._createElectrodes(device)
-        nwbFile.add_ic_electrode(electrodes)
+        nwbFile.add_icephys_electrode(electrodes)
+
+        inject_sweep_table(nwbFile)
 
         for i in self._createStimulusSeries(electrodes):
             nwbFile.add_stimulus(i, use_sweep_table=True)
diff --git a/ipfx/x_to_nwb/DatConverter.py b/ipfx/x_to_nwb/DatConverter.py
index cec26233..fd95806b 100644
--- a/ipfx/x_to_nwb/DatConverter.py
+++ b/ipfx/x_to_nwb/DatConverter.py
@@ -16,6 +16,7 @@
 from ipfx.x_to_nwb.conversion_utils import PLACEHOLDER, V_CLAMP_MODE, I_CLAMP_MODE, \
     parseUnit, getStimulusSeriesClass, getAcquiredSeriesClass, createSeriesName, convertDataset, \
     getPackageInfo, getStimulusRecordIndex, createCycleID, clampModeToString
+from ipfx.utilities 
import inject_sweep_table
 
 log = logging.getLogger(__name__)
 
@@ -68,7 +69,9 @@ def generateList(multipleGroupsPerFile, pul):
             self.electrodeDict = DatConverter._generateElectrodeDict(elem)
 
             electrodes = self._createElectrodes(device)
-            nwbFile.add_ic_electrode(electrodes)
+            nwbFile.add_icephys_electrode(electrodes)
+
+            inject_sweep_table(nwbFile)
 
             for i in self._createAcquiredSeries(electrodes, elem):
                 nwbFile.add_acquisition(i, use_sweep_table=True)
diff --git a/ipfx/x_to_nwb/conversion_utils.py b/ipfx/x_to_nwb/conversion_utils.py
old mode 100644
new mode 100755
index d631b222..e64d41e1
--- a/ipfx/x_to_nwb/conversion_utils.py
+++ b/ipfx/x_to_nwb/conversion_utils.py
@@ -4,7 +4,7 @@
 """
 
 import math
-from pkg_resources import get_distribution, DistributionNotFound
+from ipfx import __version__
 import os
 
 from subprocess import Popen, PIPE
@@ -124,7 +123,7 @@ def convertDataset(array, compression):
 
 def getPackageInfo():
     """
-    Return a dictionary with version information for the allensdk package
+    Return a dictionary with version information for the ipfx package
     """
 
     def get_git_version():
@@ -143,10 +142,7 @@ def get_git_version():
 
         return f"({branch}) {rev}"
 
-    try:
-        package_version = get_distribution('allensdk').version
-    except DistributionNotFound: # not installed as a package
-        package_version = None
+    package_version = __version__
 
     try:
         git_version = get_git_version()
diff --git a/requirements.txt b/requirements.txt
old mode 100644
new mode 100755
index e5d8da72..b34f70ef
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,3 @@
-allensdk
 argschema
 dictdiffer
 h5py
@@ -10,7 +9,7 @@ pandas
 pg8000
 pillow
 pyabf
-pynwb==2.2.0
+pynwb
 pyYAML
 ruamel.yaml<0.18.0
 scipy
diff --git a/setup.py b/setup.py
index 12f6e559..1b7eb848 100644
--- a/setup.py
+++ b/setup.py
@@ -69,7 +69,11 @@ def run(self):
         "Natural Language :: English",
         "Operating System :: OS Independent",
         "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
         "Programming Language :: Python :: 3.11",
+        "Programming 
Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Topic :: Scientific/Engineering :: Bio-Informatics" ], cmdclass={'check_version': CheckVersionCommand} diff --git a/tests/attach_metadata/test_cli.py b/tests/attach_metadata/test_cli.py index 094fd906..2c025def 100644 --- a/tests/attach_metadata/test_cli.py +++ b/tests/attach_metadata/test_cli.py @@ -12,7 +12,7 @@ import pynwb import numpy as np import pytest - +from ipfx.utilities import inject_sweep_table class CliRunner: @@ -56,6 +56,9 @@ def simple_nwb(base_path): identifier='test session', session_start_time=datetime.now() ) + + inject_sweep_table(nwbfile) + nwbfile.add_acquisition( pynwb.TimeSeries( name="a timeseries", diff --git a/tests/attach_metadata/test_nwb2_sink.py b/tests/attach_metadata/test_nwb2_sink.py index 567c0686..1df7a1e6 100644 --- a/tests/attach_metadata/test_nwb2_sink.py +++ b/tests/attach_metadata/test_nwb2_sink.py @@ -16,6 +16,7 @@ import h5py from ipfx.attach_metadata.sink import nwb2_sink +from ipfx.utilities import inject_sweep_table @pytest.fixture @@ -32,7 +33,7 @@ def nwbfile(): device=dev, description="" ) - _nwbfile.add_ic_electrode(ice) + _nwbfile.add_icephys_electrode(ice) series = pynwb.icephys.CurrentClampSeries( name="a current clamp", data=[1, 2, 3], @@ -42,6 +43,7 @@ def nwbfile(): electrode=ice, sweep_number=12 ) + inject_sweep_table(_nwbfile) _nwbfile.add_acquisition(series, use_sweep_table=True) _nwbfile.subject = pynwb.file.Subject() diff --git a/tests/dataset/test_ephys_nwb_data.py b/tests/dataset/test_ephys_nwb_data.py index 4cec15d3..0b0d046d 100644 --- a/tests/dataset/test_ephys_nwb_data.py +++ b/tests/dataset/test_ephys_nwb_data.py @@ -1,11 +1,12 @@ import pytest from ipfx.stimulus import StimulusOntology from ipfx.dataset.ephys_nwb_data import EphysNWBData -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import datetime import pynwb from pynwb.icephys import 
CurrentClampStimulusSeries, CurrentClampSeries import numpy as np +from ipfx.utilities import inject_sweep_table from dictdiffer import diff @@ -43,7 +44,7 @@ def nwbfile_to_test(): stimulus_meta_data = { "name": "stimulus", "sweep_number": 4, - "unit": "A", + "unit": "amperes", "gain": 32.0, "resolution": 1.0, "conversion": 1.0E-3, @@ -58,13 +59,14 @@ def nwbfile_to_test(): **stimulus_meta_data ) + inject_sweep_table(nwbfile) nwbfile.add_stimulus(stimulus_series, use_sweep_table=True) response_data = [1, 2, 3, 4, 5] response_meta_data = { "name":"acquisition", "sweep_number": 4, - "unit": "V", + "unit": "volts", "gain": 32.0, "resolution": 1.0, "conversion": 1.0E-3, diff --git a/tests/test_append_nwb.py b/tests/test_append_nwb.py index a2c3b04a..73ae5ba6 100644 --- a/tests/test_append_nwb.py +++ b/tests/test_append_nwb.py @@ -16,7 +16,7 @@ def make_skeleton_nwb2_file(nwb2_file_name): ) device = nwbfile.create_device(name='electrode_0') - nwbfile.create_ic_electrode( + nwbfile.create_icephys_electrode( name="elec0", description='intracellular electrode', device=device diff --git a/tests/test_feature_vector.py b/tests/test_feature_vector.py old mode 100644 new mode 100755 index 2a65bab5..b922b00e --- a/tests/test_feature_vector.py +++ b/tests/test_feature_vector.py @@ -5,7 +5,7 @@ import ipfx.feature_vectors as fv from ipfx.stimulus import StimulusOntology from ipfx.sweep import Sweep, SweepSet -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju import pytest diff --git a/tests/test_mies_nwb_pipeline_output.py b/tests/test_mies_nwb_pipeline_output.py old mode 100644 new mode 100755 index a8303752..d2122ceb --- a/tests/test_mies_nwb_pipeline_output.py +++ b/tests/test_mies_nwb_pipeline_output.py @@ -3,15 +3,15 @@ import pytest import pandas as pd import os -import allensdk.core.json_utilities as ju +import ipfx.json_utilities as ju from ipfx.bin.run_pipeline import run_pipeline -from pkg_resources import resource_filename +import 
importlib.resources from dictdiffer import diff -TEST_SPECIMENS_FILE = resource_filename(__name__, 'test_mies_nwb2_specimens.csv') - -test_specimens = pd.read_csv(TEST_SPECIMENS_FILE, sep=" ") +ref = importlib.resources.files('ipfx') / '../tests/test_mies_nwb2_specimens.csv' +with importlib.resources.as_file(ref) as path: + test_specimens = pd.read_csv(path, sep=" ") test_specimens_params = [tuple(sp) for sp in test_specimens.values] @@ -81,13 +81,13 @@ def test_mies_nwb_pipeline_output(input_json, output_json, tmpdir_factory): output_diff = list(diff(expected, obtained, tolerance=0.001)) - # There is a known issue with newer MIES-generated NWBs: They report - # recording date in offsetless UTC, rather than local time +- an offset to + # There is a known issue with newer MIES-generated NWBs: They report + # recording date in offsetless UTC, rather than local time +- an offset to # UTC as in the older generation. unacceptable = [] for item in output_diff: if not "recording_date" in item[1]: - unacceptable.append(item) + unacceptable.append(item) if unacceptable: print(unacceptable)