From 56d783c1f3373428a00ed9cd21e2f9e6a1df0c91 Mon Sep 17 00:00:00 2001 From: Steph Prince <40640337+stephprince@users.noreply.github.com> Date: Thu, 17 Oct 2024 10:12:40 -0700 Subject: [PATCH 1/8] Update CHANGELOG.md --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 16e504659..191f67e29 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # PyNWB Changelog +## PyNWB 3.0.0 (Upcoming) + +### Breaking changes + +### Enhancements and minor changes + ## PyNWB 2.8.3 (Upcoming) ### Performance From b5e5629a8de2dd14556694355dc68ac3807d8c84 Mon Sep 17 00:00:00 2001 From: Steph Prince <40640337+stephprince@users.noreply.github.com> Date: Wed, 23 Oct 2024 17:41:06 -0700 Subject: [PATCH 2/8] Add warnings when using positional arguments in container constructors (#1972) * add positional argument warning * update tests to use kwargs * update CHANGELOG.md --- CHANGELOG.md | 1 + src/pynwb/base.py | 20 +-- src/pynwb/behavior.py | 9 +- src/pynwb/core.py | 8 +- src/pynwb/device.py | 5 +- src/pynwb/ecephys.py | 17 ++- src/pynwb/epoch.py | 5 +- src/pynwb/file.py | 16 ++- src/pynwb/icephys.py | 81 +++++++----- src/pynwb/image.py | 20 ++- src/pynwb/misc.py | 19 +-- src/pynwb/ogen.py | 8 +- src/pynwb/ophys.py | 23 ++-- src/pynwb/resources.py | 5 +- tests/integration/hdf5/test_image.py | 4 +- tests/integration/hdf5/test_misc.py | 2 +- tests/integration/hdf5/test_nwbfile.py | 2 +- tests/unit/test_ecephys.py | 91 ++++++++++---- tests/unit/test_epoch_legacy.py | 5 +- tests/unit/test_file.py | 76 ++++++++---- tests/unit/test_icephys.py | 137 ++++++++++++++++----- tests/unit/test_icephys_metadata_tables.py | 12 +- tests/unit/test_image.py | 4 +- tests/unit/test_misc.py | 18 ++- tests/unit/test_ogen.py | 2 +- 25 files changed, 400 insertions(+), 190 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 191f67e29..065ebc0c5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ ### Breaking changes ### Enhancements and 
minor changes +- Added warnings when using positional arguments in `Container` constructor methods. Positional arguments will raise errors in the next major release. @stephprince [#1972](https://github.com/NeurodataWithoutBorders/pynwb/pull/1972) ## PyNWB 2.8.3 (Upcoming) diff --git a/src/pynwb/base.py b/src/pynwb/base.py index 8b4daf48f..0bc5d22d7 100644 --- a/src/pynwb/base.py +++ b/src/pynwb/base.py @@ -6,7 +6,7 @@ from hdmf.utils import docval, popargs_to_dict, get_docval, popargs from hdmf.common import DynamicTable, VectorData -from hdmf.utils import get_data_shape +from hdmf.utils import get_data_shape, AllowPositional from . import register_class, CORE_NAMESPACE from .core import NWBDataInterface, MultiContainerInterface, NWBData @@ -33,7 +33,8 @@ class ProcessingModule(MultiContainerInterface): @docval({'name': 'name', 'type': str, 'doc': 'The name of this processing module'}, {'name': 'description', 'type': str, 'doc': 'Description of this processing module'}, {'name': 'data_interfaces', 'type': (list, tuple, dict), - 'doc': 'NWBDataInterfaces that belong to this ProcessingModule', 'default': None}) + 'doc': 'NWBDataInterfaces that belong to this ProcessingModule', 'default': None}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): description, data_interfaces = popargs("description", "data_interfaces", kwargs) super().__init__(**kwargs) @@ -143,7 +144,8 @@ class TimeSeries(NWBDataInterface): 'distinct moments in time. Times of image presentations would be "step" because the picture ' 'remains the same until the next time-point. This field is optional, but is useful in providing ' 'information about the underlying data. 
It may inform the way this data is interpreted, the way it ' - 'is visualized, and what analysis methods are applicable.'}) + 'is visualized, and what analysis methods are applicable.'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): """Create a TimeSeries object """ @@ -375,7 +377,8 @@ class Image(NWBData): {'name': 'data', 'type': ('array_data', 'data'), 'doc': 'data of image. Dimensions: x, y [, r,g,b[,a]]', 'shape': ((None, None), (None, None, 3), (None, None, 4))}, {'name': 'resolution', 'type': float, 'doc': 'pixels / cm', 'default': None}, - {'name': 'description', 'type': str, 'doc': 'description of image', 'default': None}) + {'name': 'description', 'type': str, 'doc': 'description of image', 'default': None}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): args_to_set = popargs_to_dict(("resolution", "description"), kwargs) super().__init__(**kwargs) @@ -392,7 +395,8 @@ class ImageReferences(NWBData): __nwbfields__ = ('data', ) @docval({'name': 'name', 'type': str, 'doc': 'The name of this ImageReferences object.'}, - {'name': 'data', 'type': 'array_data', 'doc': 'The images in order.'},) + {'name': 'data', 'type': 'array_data', 'doc': 'The images in order.'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): # NOTE we do not use the docval shape validator here because it will recognize a list of P MxN images as # having shape (P, M, N) @@ -424,7 +428,8 @@ class Images(MultiContainerInterface): {'name': 'images', 'type': 'array_data', 'doc': 'image objects', 'default': None}, {'name': 'description', 'type': str, 'doc': 'description of images', 'default': 'no description'}, {'name': 'order_of_images', 'type': ImageReferences, - 'doc': 'Ordered dataset of references to Image objects stored in the parent group.', 'default': None},) + 'doc': 'Ordered dataset of references to Image objects stored in the parent group.', 'default': None}, + allow_positional=AllowPositional.WARNING,) 
def __init__(self, **kwargs): args_to_set = popargs_to_dict(("description", "images", "order_of_images"), kwargs) @@ -617,7 +622,8 @@ class TimeSeriesReferenceVectorData(VectorData): 'default': "Column storing references to a TimeSeries (rows). For each TimeSeries this " "VectorData column stores the start_index and count to indicate the range in time " "to be selected as well as an object reference to the TimeSeries."}, - *get_docval(VectorData.__init__, 'data')) + *get_docval(VectorData.__init__, 'data'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): super().__init__(**kwargs) # CAUTION: Define any logic specific for init in the self._init_internal function, not here! diff --git a/src/pynwb/behavior.py b/src/pynwb/behavior.py index 1b3078535..170c9bc6c 100644 --- a/src/pynwb/behavior.py +++ b/src/pynwb/behavior.py @@ -1,6 +1,6 @@ import warnings -from hdmf.utils import docval, popargs, get_docval, get_data_shape +from hdmf.utils import docval, popargs, get_docval, get_data_shape, AllowPositional from . 
import register_class, CORE_NAMESPACE from .core import MultiContainerInterface @@ -33,13 +33,14 @@ class SpatialSeries(TimeSeries): {'name': 'unit', 'type': str, 'doc': 'The base unit of measurement (should be SI unit)', 'default': 'meters'}, *get_docval(TimeSeries.__init__, 'conversion', 'resolution', 'timestamps', 'starting_time', 'rate', - 'comments', 'description', 'control', 'control_description', 'offset')) + 'comments', 'description', 'control', 'control_description', 'offset'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): """ Create a SpatialSeries TimeSeries dataset """ - name, data, bounds, reference_frame, unit = popargs('name', 'data', 'bounds', 'reference_frame', 'unit', kwargs) - super().__init__(name, data, unit, **kwargs) + name, data, bounds, reference_frame = popargs('name', 'data', 'bounds', 'reference_frame', kwargs) + super().__init__(name=name, data=data, **kwargs) # NWB 2.5 restricts length of second dimension to be <= 3 allowed_data_shapes = ((None, ), (None, 1), (None, 2), (None, 3)) diff --git a/src/pynwb/core.py b/src/pynwb/core.py index f9ae2bd2f..59f849db8 100644 --- a/src/pynwb/core.py +++ b/src/pynwb/core.py @@ -6,7 +6,7 @@ from hdmf.container import AbstractContainer, MultiContainerInterface as hdmf_MultiContainerInterface, Table from hdmf.common import DynamicTable, DynamicTableRegion # noqa: F401 from hdmf.common import VectorData, VectorIndex, ElementIdentifiers # noqa: F401 -from hdmf.utils import docval, popargs +from hdmf.utils import docval, popargs, AllowPositional from hdmf.utils import LabelledDict # noqa: F401 from . 
import CORE_NAMESPACE, register_class @@ -78,7 +78,8 @@ class NWBDataInterface(NWBContainer): class NWBData(NWBMixin, Data): @docval({'name': 'name', 'type': str, 'doc': 'the name of this container'}, - {'name': 'data', 'type': ('scalar_data', 'array_data', 'data', Data), 'doc': 'the source of the data'}) + {'name': 'data', 'type': ('scalar_data', 'array_data', 'data', Data), 'doc': 'the source of the data'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): super().__init__(**kwargs) self.__data = kwargs['data'] @@ -123,7 +124,8 @@ class ScratchData(NWBData): {'name': 'data', 'type': ('scalar_data', 'array_data', 'data', Data), 'doc': 'the source of the data'}, {'name': 'notes', 'type': str, 'doc': 'notes about the data. This argument will be deprecated. Use description instead', 'default': ''}, - {'name': 'description', 'type': str, 'doc': 'notes about the data', 'default': None}) + {'name': 'description', 'type': str, 'doc': 'notes about the data', 'default': None}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): notes, description = popargs('notes', 'description', kwargs) super().__init__(**kwargs) diff --git a/src/pynwb/device.py b/src/pynwb/device.py index 6fcd610c8..5df75688c 100644 --- a/src/pynwb/device.py +++ b/src/pynwb/device.py @@ -1,4 +1,4 @@ -from hdmf.utils import docval, popargs +from hdmf.utils import docval, popargs, AllowPositional from . 
import register_class, CORE_NAMESPACE from .core import NWBContainer @@ -19,7 +19,8 @@ class Device(NWBContainer): 'doc': 'Description of the device (e.g., model, firmware version, processing software version, etc.)', 'default': None}, {'name': 'manufacturer', 'type': str, 'doc': 'the name of the manufacturer of this device', - 'default': None}) + 'default': None}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): description, manufacturer = popargs('description', 'manufacturer', kwargs) super().__init__(**kwargs) diff --git a/src/pynwb/ecephys.py b/src/pynwb/ecephys.py index e6dadbe97..2bb4992a5 100644 --- a/src/pynwb/ecephys.py +++ b/src/pynwb/ecephys.py @@ -4,7 +4,7 @@ from hdmf.common import DynamicTableRegion from hdmf.data_utils import DataChunkIterator, assertEqualShape -from hdmf.utils import docval, popargs, get_docval, popargs_to_dict, get_data_shape +from hdmf.utils import docval, popargs, get_docval, popargs_to_dict, get_data_shape, AllowPositional from . import register_class, CORE_NAMESPACE from .base import TimeSeries @@ -29,7 +29,8 @@ class ElectrodeGroup(NWBContainer): {'name': 'position', 'type': 'array_data', 'doc': 'Compound dataset with stereotaxic position of this electrode group (x, y, z). ' 'The data array must have three elements or the dtype of the ' - 'array must be ``(float, float, float)``', 'default': None}) + 'array must be ``(float, float, float)``', 'default': None}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): args_to_set = popargs_to_dict(('description', 'location', 'device', 'position'), kwargs) super().__init__(**kwargs) @@ -91,7 +92,8 @@ class ElectricalSeries(TimeSeries): "filter is unknown, then this value could be 'Low-pass filter at 300 Hz'. 
If a non-standard filter " "type is used, provide as much detail about the filter properties as possible.", 'default': None}, *get_docval(TimeSeries.__init__, 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate', - 'comments', 'description', 'control', 'control_description', 'offset')) + 'comments', 'description', 'control', 'control_description', 'offset'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): args_to_set = popargs_to_dict(('electrodes', 'channel_conversion', 'filtering'), kwargs) @@ -134,7 +136,8 @@ class SpikeEventSeries(ElectricalSeries): 'doc': 'Timestamps for samples stored in data'}, *get_docval(ElectricalSeries.__init__, 'electrodes'), # required *get_docval(ElectricalSeries.__init__, 'resolution', 'conversion', 'comments', 'description', 'control', - 'control_description', 'offset')) + 'control_description', 'offset'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): data = kwargs['data'] timestamps = kwargs['timestamps'] @@ -169,7 +172,8 @@ class EventDetection(NWBDataInterface): '(e.g., .25msec before action potential peak, zero-crossing time, etc). 
' 'The index points to each event from the raw data'}, {'name': 'times', 'type': ('array_data', 'data'), 'doc': 'Timestamps of events, in Seconds'}, - {'name': 'name', 'type': str, 'doc': 'the name of this container', 'default': 'EventDetection'}) + {'name': 'name', 'type': str, 'doc': 'the name of this container', 'default': 'EventDetection'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): args_to_set = popargs_to_dict(('detection_method', 'source_electricalseries', 'source_idx', 'times'), kwargs) super().__init__(**kwargs) @@ -320,7 +324,8 @@ class FeatureExtraction(NWBDataInterface): 'doc': 'The times of events that features correspond to'}, {'name': 'features', 'type': ('array_data', 'data'), 'shape': (None, None, None), 'doc': 'Features for each channel'}, - {'name': 'name', 'type': str, 'doc': 'the name of this container', 'default': 'FeatureExtraction'}) + {'name': 'name', 'type': str, 'doc': 'the name of this container', 'default': 'FeatureExtraction'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): # get the inputs electrodes, description, times, features = popargs( diff --git a/src/pynwb/epoch.py b/src/pynwb/epoch.py index 9cccc5db5..1db797462 100644 --- a/src/pynwb/epoch.py +++ b/src/pynwb/epoch.py @@ -2,7 +2,7 @@ from hdmf.data_utils import DataIO from hdmf.common import DynamicTable -from hdmf.utils import docval, getargs, popargs, get_docval +from hdmf.utils import docval, getargs, popargs, get_docval, AllowPositional from . 
import register_class, CORE_NAMESPACE from .base import TimeSeries, TimeSeriesReferenceVectorData, TimeSeriesReference @@ -27,7 +27,8 @@ class TimeIntervals(DynamicTable): @docval({'name': 'name', 'type': str, 'doc': 'name of this TimeIntervals'}, # required {'name': 'description', 'type': str, 'doc': 'Description of this TimeIntervals', 'default': "experimental intervals"}, - *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) + *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): super().__init__(**kwargs) diff --git a/src/pynwb/file.py b/src/pynwb/file.py index 7621df2ac..9d0f4d154 100644 --- a/src/pynwb/file.py +++ b/src/pynwb/file.py @@ -45,7 +45,8 @@ class LabMetaData(NWBContainer): tutorial. """ - @docval({'name': 'name', 'type': str, 'doc': 'name of lab metadata'}) + @docval({'name': 'name', 'type': str, 'doc': 'name of lab metadata'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): super().__init__(**kwargs) @@ -107,6 +108,7 @@ class Subject(NWBContainer): {'name': 'date_of_birth', 'type': datetime, 'default': None, 'doc': 'The datetime of the date of birth. May be supplied instead of age.'}, {'name': 'strain', 'type': str, 'doc': 'The strain of the subject, e.g., "C57BL/6J"', 'default': None}, + allow_positional=AllowPositional.WARNING, ) def __init__(self, **kwargs): keys_to_set = ( @@ -940,7 +942,8 @@ def get_icephys_simultaneous_recordings(self): will return None if the table is currently not being used. 
""" if self.icephys_simultaneous_recordings is None: - self.icephys_simultaneous_recordings = SimultaneousRecordingsTable(self.get_intracellular_recordings()) + self.icephys_simultaneous_recordings = SimultaneousRecordingsTable( + intracellular_recordings_table=self.get_intracellular_recordings()) return self.icephys_simultaneous_recordings @docval(*get_docval(SimultaneousRecordingsTable.add_simultaneous_recording), @@ -963,7 +966,8 @@ def get_icephys_sequential_recordings(self): will return None if the table is currently not being used. """ if self.icephys_sequential_recordings is None: - self.icephys_sequential_recordings = SequentialRecordingsTable(self.get_icephys_simultaneous_recordings()) + self.icephys_sequential_recordings = SequentialRecordingsTable( + simultaneous_recordings_table=self.get_icephys_simultaneous_recordings()) return self.icephys_sequential_recordings @docval(*get_docval(SequentialRecordingsTable.add_sequential_recording), @@ -987,7 +991,8 @@ def get_icephys_repetitions(self): will return None if the table is currently not being used. """ if self.icephys_repetitions is None: - self.icephys_repetitions = RepetitionsTable(self.get_icephys_sequential_recordings()) + self.icephys_repetitions = RepetitionsTable( + sequential_recordings_table=self.get_icephys_sequential_recordings()) return self.icephys_repetitions @docval(*get_docval(RepetitionsTable.add_repetition), @@ -1010,7 +1015,8 @@ def get_icephys_experimental_conditions(self): will return None if the table is currently not being used. 
""" if self.icephys_experimental_conditions is None: - self.icephys_experimental_conditions = ExperimentalConditionsTable(self.get_icephys_repetitions()) + self.icephys_experimental_conditions = ExperimentalConditionsTable( + repetitions_table=self.get_icephys_repetitions()) return self.icephys_experimental_conditions @docval(*get_docval(ExperimentalConditionsTable.add_experimental_condition), diff --git a/src/pynwb/icephys.py b/src/pynwb/icephys.py index 3101649fd..4042260d3 100644 --- a/src/pynwb/icephys.py +++ b/src/pynwb/icephys.py @@ -4,7 +4,7 @@ import numpy as np from hdmf.common import DynamicTable, AlignedDynamicTable -from hdmf.utils import docval, popargs, popargs_to_dict, get_docval, getargs +from hdmf.utils import docval, popargs, popargs_to_dict, get_docval, getargs, AllowPositional from . import register_class, CORE_NAMESPACE from .base import TimeSeries, TimeSeriesReferenceVectorData @@ -51,7 +51,8 @@ class IntracellularElectrode(NWBContainer): {'name': 'resistance', 'type': str, 'doc': 'Electrode resistance, unit - Ohm.', 'default': None}, {'name': 'filtering', 'type': str, 'doc': 'Electrode specific filtering.', 'default': None}, {'name': 'initial_access_resistance', 'type': str, 'doc': 'Initial access resistance.', 'default': None}, - {'name': 'cell_id', 'type': str, 'doc': 'Unique ID of cell.', 'default': None} + {'name': 'cell_id', 'type': str, 'doc': 'Unique ID of cell.', 'default': None}, + allow_positional=AllowPositional.WARNING, ) def __init__(self, **kwargs): keys_to_set = ( @@ -134,12 +135,13 @@ class PatchClampSeries(TimeSeries): 'doc': 'Sweep number, allows for grouping different PatchClampSeries ' 'together via the sweep_table', 'default': None, - } + }, + allow_positional=AllowPositional.WARNING, ) def __init__(self, **kwargs): - name, data, unit, stimulus_description = popargs('name', 'data', 'unit', 'stimulus_description', kwargs) - electrode, gain, sweep_number = popargs('electrode', 'gain', 'sweep_number', kwargs) - 
super().__init__(name, data, unit, **kwargs) + electrode, gain, stimulus_description, sweep_number = popargs('electrode', 'gain', 'stimulus_description', + 'sweep_number', kwargs) + super().__init__(**kwargs) self.electrode = electrode self.gain = gain self.stimulus_description = stimulus_description @@ -172,13 +174,14 @@ class CurrentClampSeries(PatchClampSeries): *get_docval(PatchClampSeries.__init__, 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate', 'comments', 'description', 'control', 'control_description', 'sweep_number', 'offset'), {'name': 'unit', 'type': str, 'doc': "The base unit of measurement (must be 'volts')", - 'default': 'volts'}) + 'default': 'volts'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): - name, data, unit, electrode, gain = popargs('name', 'data', 'unit', 'electrode', 'gain', kwargs) + name, unit = popargs('name', 'unit', kwargs) unit = ensure_unit(self, name, unit, 'volts', '2.1.0') bias_current, bridge_balance, capacitance_compensation = popargs( 'bias_current', 'bridge_balance', 'capacitance_compensation', kwargs) - super().__init__(name, data, unit, electrode, gain, **kwargs) + super().__init__(name=name, unit=unit, **kwargs) self.bias_current = bias_current self.bridge_balance = bridge_balance self.capacitance_compensation = capacitance_compensation @@ -205,14 +208,15 @@ class IZeroClampSeries(CurrentClampSeries): 'starting_time', 'rate', 'comments', 'description', 'control', 'control_description', 'sweep_number', 'offset'), {'name': 'unit', 'type': str, 'doc': "The base unit of measurement (must be 'volts')", - 'default': 'volts'}) + 'default': 'volts'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): - name, data, electrode, gain = popargs('name', 'data', 'electrode', 'gain', kwargs) + name, stimulus_description = popargs('name', 'stimulus_description', kwargs) bias_current, bridge_balance, capacitance_compensation = (0.0, 0.0, 0.0) - stimulus_description = 
popargs('stimulus_description', kwargs) stimulus_description = self._ensure_stimulus_description(name, stimulus_description, 'N/A', '2.3.0') kwargs['stimulus_description'] = stimulus_description - super().__init__(name, data, electrode, gain, bias_current, bridge_balance, capacitance_compensation, + super().__init__(name=name, bias_current=bias_current, bridge_balance=bridge_balance, + capacitance_compensation=capacitance_compensation, **kwargs) def _ensure_stimulus_description(self, name, current_stim_desc, stim_desc, nwb_version): @@ -243,11 +247,12 @@ class CurrentClampStimulusSeries(PatchClampSeries): 'starting_time', 'rate', 'comments', 'description', 'control', 'control_description', 'sweep_number', 'offset'), {'name': 'unit', 'type': str, 'doc': "The base unit of measurement (must be 'amperes')", - 'default': 'amperes'}) + 'default': 'amperes'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): - name, data, unit, electrode, gain = popargs('name', 'data', 'unit', 'electrode', 'gain', kwargs) + name, unit = popargs('name', 'unit', kwargs) unit = ensure_unit(self, name, unit, 'amperes', '2.1.0') - super().__init__(name, data, unit, electrode, gain, **kwargs) + super().__init__(name=name, unit=unit, **kwargs) @register_class('VoltageClampSeries', CORE_NAMESPACE) @@ -279,16 +284,17 @@ class VoltageClampSeries(PatchClampSeries): *get_docval(PatchClampSeries.__init__, 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate', 'comments', 'description', 'control', 'control_description', 'sweep_number', 'offset'), {'name': 'unit', 'type': str, 'doc': "The base unit of measurement (must be 'amperes')", - 'default': 'amperes'}) + 'default': 'amperes'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): - name, data, unit, electrode, gain = popargs('name', 'data', 'unit', 'electrode', 'gain', kwargs) - unit = ensure_unit(self, name, unit, 'amperes', '2.1.0') - capacitance_fast, capacitance_slow, 
resistance_comp_bandwidth, resistance_comp_correction, \ + name, unit, capacitance_fast, capacitance_slow, resistance_comp_bandwidth, resistance_comp_correction, \ resistance_comp_prediction, whole_cell_capacitance_comp, whole_cell_series_resistance_comp = popargs( - 'capacitance_fast', 'capacitance_slow', 'resistance_comp_bandwidth', + 'name', 'unit', 'capacitance_fast', 'capacitance_slow', 'resistance_comp_bandwidth', 'resistance_comp_correction', 'resistance_comp_prediction', 'whole_cell_capacitance_comp', 'whole_cell_series_resistance_comp', kwargs) - super().__init__(name, data, unit, electrode, gain, **kwargs) + unit = ensure_unit(self, name, unit, 'amperes', '2.1.0') + + super().__init__(name=name, unit=unit, **kwargs) self.capacitance_fast = capacitance_fast self.capacitance_slow = capacitance_slow self.resistance_comp_bandwidth = resistance_comp_bandwidth @@ -312,11 +318,12 @@ class VoltageClampStimulusSeries(PatchClampSeries): 'starting_time', 'rate', 'comments', 'description', 'control', 'control_description', 'sweep_number', 'offset'), {'name': 'unit', 'type': str, 'doc': "The base unit of measurement (must be 'volts')", - 'default': 'volts'}) + 'default': 'volts'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): - name, data, unit, electrode, gain = popargs('name', 'data', 'unit', 'electrode', 'gain', kwargs) + name, unit = popargs('name', 'unit', kwargs) unit = ensure_unit(self, name, unit, 'volts', '2.1.0') - super().__init__(name, data, unit, electrode, gain, **kwargs) + super().__init__(name=name, unit=unit, **kwargs) @register_class('SweepTable', CORE_NAMESPACE) @@ -394,7 +401,8 @@ class IntracellularElectrodesTable(DynamicTable): 'table': False}, ) - @docval(*get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) + @docval(*get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): # Define defaultb name and description settings 
kwargs['name'] = 'electrodes' @@ -423,7 +431,8 @@ class IntracellularStimuliTable(DynamicTable): 'class': TimeSeriesReferenceVectorData}, ) - @docval(*get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) + @docval(*get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): # Define defaultb name and description settings kwargs['name'] = 'stimuli' @@ -446,7 +455,8 @@ class IntracellularResponsesTable(DynamicTable): 'class': TimeSeriesReferenceVectorData}, ) - @docval(*get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) + @docval(*get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): # Define defaultb name and description settings kwargs['name'] = 'responses' @@ -462,7 +472,8 @@ class IntracellularRecordingsTable(AlignedDynamicTable): a single simultaneous_recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. 
""" - @docval(*get_docval(AlignedDynamicTable.__init__, 'id', 'columns', 'colnames', 'category_tables', 'categories')) + @docval(*get_docval(AlignedDynamicTable.__init__, 'id', 'columns', 'colnames', 'category_tables', 'categories'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): kwargs['name'] = 'intracellular_recordings' kwargs['description'] = ('A table to group together a stimulus and response from a single electrode ' @@ -747,7 +758,8 @@ class SimultaneousRecordingsTable(DynamicTable): 'reading the Container from file as the table attribute is already populated in this case ' 'but otherwise this is required.', 'default': None}, - *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) + *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): intracellular_recordings_table = popargs('intracellular_recordings_table', kwargs) # Define default name and description settings @@ -806,7 +818,8 @@ class SequentialRecordingsTable(DynamicTable): 'column indexes. 
May be None when reading the Container from file as the ' 'table attribute is already populated in this case but otherwise this is required.', 'default': None}, - *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) + *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): simultaneous_recordings_table = popargs('simultaneous_recordings_table', kwargs) # Define defaultb name and description settings @@ -863,7 +876,8 @@ class RepetitionsTable(DynamicTable): 'be None when reading the Container from file as the table attribute is already populated ' 'in this case but otherwise this is required.', 'default': None}, - *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) + *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): sequential_recordings_table = popargs('sequential_recordings_table', kwargs) # Define default name and description settings @@ -915,7 +929,8 @@ class ExperimentalConditionsTable(DynamicTable): 'type': RepetitionsTable, 'doc': 'the RepetitionsTable table that the repetitions column indexes', 'default': None}, - *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) + *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): repetitions_table = popargs('repetitions_table', kwargs) # Define default name and description settings diff --git a/src/pynwb/image.py b/src/pynwb/image.py index 518ec8a8c..e8adf8ed8 100644 --- a/src/pynwb/image.py +++ b/src/pynwb/image.py @@ -10,6 +10,7 @@ popargs_to_dict, get_docval, get_data_shape, + AllowPositional, ) from . 
import register_class, CORE_NAMESPACE @@ -61,7 +62,8 @@ class ImageSeries(TimeSeries): *get_docval(TimeSeries.__init__, 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate', 'comments', 'description', 'control', 'control_description', 'offset'), {'name': 'device', 'type': Device, - 'doc': 'Device used to capture the images/video.', 'default': None},) + 'doc': 'Device used to capture the images/video.', 'default': None}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): keys_to_set = ('bits_per_pixel', 'dimension', 'external_file', 'starting_frame', 'format', 'device') args_to_set = popargs_to_dict(keys_to_set, kwargs) @@ -251,6 +253,7 @@ class IndexSeries(TimeSeries): 'control_description', 'offset', ), + allow_positional=AllowPositional.WARNING, ) def __init__(self, **kwargs): indexed_timeseries, indexed_images = popargs('indexed_timeseries', 'indexed_images', kwargs) @@ -297,7 +300,8 @@ class ImageMaskSeries(ImageSeries): {'name': 'device', 'type': Device, 'doc': ('Device used to capture the mask data. This field will likely not be needed. 
' 'The device used to capture the masked ImageSeries data should be stored in the ImageSeries.'), - 'default': None},) + 'default': None}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): masked_imageseries = popargs('masked_imageseries', kwargs) super().__init__(**kwargs) @@ -333,7 +337,8 @@ class OpticalSeries(ImageSeries): 'default': None}, *get_docval(ImageSeries.__init__, 'unit', 'format', 'external_file', 'starting_frame', 'bits_per_pixel', 'dimension', 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate', 'comments', - 'description', 'control', 'control_description', 'device', 'offset')) + 'description', 'control', 'control_description', 'device', 'offset'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): distance, field_of_view, orientation = popargs('distance', 'field_of_view', 'orientation', kwargs) super().__init__(**kwargs) @@ -349,7 +354,8 @@ class GrayscaleImage(Image): {'name': 'data', 'type': ('array_data', 'data'), 'doc': 'Data of grayscale image. Must be 2D where the dimensions represent x and y.', 'shape': (None, None)}, - *get_docval(Image.__init__, 'resolution', 'description')) + *get_docval(Image.__init__, 'resolution', 'description'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): super().__init__(**kwargs) @@ -362,7 +368,8 @@ class RGBImage(Image): 'doc': 'Data of color image. Must be 3D where the first and second dimensions represent x and y. ' 'The third dimension has length 3 and represents the RGB value.', 'shape': (None, None, 3)}, - *get_docval(Image.__init__, 'resolution', 'description')) + *get_docval(Image.__init__, 'resolution', 'description'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): super().__init__(**kwargs) @@ -375,6 +382,7 @@ class RGBAImage(Image): 'doc': 'Data of color image with transparency. Must be 3D where the first and second dimensions ' 'represent x and y. 
The third dimension has length 4 and represents the RGBA value.', 'shape': (None, None, 4)}, - *get_docval(Image.__init__, 'resolution', 'description')) + *get_docval(Image.__init__, 'resolution', 'description'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): super().__init__(**kwargs) diff --git a/src/pynwb/misc.py b/src/pynwb/misc.py index 14c2e08d1..118f2c444 100644 --- a/src/pynwb/misc.py +++ b/src/pynwb/misc.py @@ -4,7 +4,7 @@ import numpy as np -from hdmf.utils import docval, getargs, popargs, popargs_to_dict, get_docval +from hdmf.utils import docval, getargs, popargs, popargs_to_dict, get_docval, AllowPositional from . import register_class, CORE_NAMESPACE from .base import TimeSeries @@ -25,7 +25,8 @@ class AnnotationSeries(TimeSeries): {'name': 'data', 'type': ('array_data', 'data', TimeSeries), 'shape': (None,), 'doc': 'The annotations over time. Must be 1D.', 'default': list()}, - *get_docval(TimeSeries.__init__, 'timestamps', 'comments', 'description')) + *get_docval(TimeSeries.__init__, 'timestamps', 'comments', 'description'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): name, data, timestamps = popargs('name', 'data', 'timestamps', kwargs) super().__init__(name=name, data=data, unit='n/a', resolution=-1.0, timestamps=timestamps, **kwargs) @@ -63,7 +64,8 @@ class AbstractFeatureSeries(TimeSeries): 'dimension represents features'), 'default': list()}, *get_docval(TimeSeries.__init__, 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate', - 'comments', 'description', 'control', 'control_description', 'offset')) + 'comments', 'description', 'control', 'control_description', 'offset'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): name, data, features, feature_units = popargs('name', 'data', 'features', 'feature_units', kwargs) @@ -100,7 +102,8 @@ class IntervalSeries(TimeSeries): 'doc': ('The data values. Must be 1D, where the first dimension must be time. 
Values are >0 if ' 'interval started, <0 if interval ended.'), 'default': list()}, - *get_docval(TimeSeries.__init__, 'timestamps', 'comments', 'description', 'control', 'control_description')) + *get_docval(TimeSeries.__init__, 'timestamps', 'comments', 'description', 'control', 'control_description'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): name, data, timestamps = popargs('name', 'data', 'timestamps', kwargs) self.__interval_timestamps = timestamps @@ -164,7 +167,8 @@ class Units(DynamicTable): {'name': 'waveform_unit', 'type': str, 'doc': 'Unit of measurement of the waveform means', 'default': 'volts'}, {'name': 'resolution', 'type': float, - 'doc': 'The smallest possible difference between two spike times', 'default': None} + 'doc': 'The smallest possible difference between two spike times', 'default': None}, + allow_positional=AllowPositional.WARNING, ) def __init__(self, **kwargs): args_to_set = popargs_to_dict(("waveform_rate", "waveform_unit", "resolution"), kwargs) @@ -198,7 +202,7 @@ def __init__(self, **kwargs): {'name': 'waveforms', 'type': 'array_data', 'default': None, 'doc': waveforms_desc, 'shape': ((None, None), (None, None, None))}, {'name': 'id', 'type': int, 'default': None, 'doc': 'the id for each unit'}, - allow_extra=True) + allow_extra=True,) def add_unit(self, **kwargs): """ Add a unit to this table @@ -277,7 +281,8 @@ class DecompositionSeries(TimeSeries): 'similar to ElectricalSeries.electrodes.'), 'default': None}, *get_docval(TimeSeries.__init__, 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate', - 'comments', 'control', 'control_description', 'offset')) + 'comments', 'control', 'control_description', 'offset'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): metric, source_timeseries, bands, source_channels = popargs('metric', 'source_timeseries', 'bands', 'source_channels', kwargs) diff --git a/src/pynwb/ogen.py b/src/pynwb/ogen.py index 
af11842e4..a1370b4b0 100644 --- a/src/pynwb/ogen.py +++ b/src/pynwb/ogen.py @@ -1,4 +1,4 @@ -from hdmf.utils import docval, popargs, get_docval, popargs_to_dict +from hdmf.utils import docval, popargs, get_docval, popargs_to_dict, AllowPositional from . import register_class, CORE_NAMESPACE from .base import TimeSeries @@ -19,7 +19,8 @@ class OptogeneticStimulusSite(NWBContainer): {'name': 'device', 'type': Device, 'doc': 'The device that was used.'}, {'name': 'description', 'type': str, 'doc': 'Description of site.'}, {'name': 'excitation_lambda', 'type': float, 'doc': 'Excitation wavelength in nm.'}, - {'name': 'location', 'type': str, 'doc': 'Location of stimulation site.'}) + {'name': 'location', 'type': str, 'doc': 'Location of stimulation site.'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): args_to_set = popargs_to_dict(('device', 'description', 'excitation_lambda', 'location'), kwargs) super().__init__(**kwargs) @@ -43,7 +44,8 @@ class OptogeneticSeries(TimeSeries): {'name': 'site', 'type': OptogeneticStimulusSite, # required 'doc': 'The site to which this stimulus was applied.'}, *get_docval(TimeSeries.__init__, 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate', - 'comments', 'description', 'control', 'control_description', 'offset')) + 'comments', 'description', 'control', 'control_description', 'offset'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): site = popargs('site', kwargs) kwargs['unit'] = 'watts' diff --git a/src/pynwb/ophys.py b/src/pynwb/ophys.py index 6f1483079..aecba765d 100644 --- a/src/pynwb/ophys.py +++ b/src/pynwb/ophys.py @@ -3,7 +3,7 @@ import warnings from hdmf.common import DynamicTable, DynamicTableRegion -from hdmf.utils import docval, popargs, get_docval, get_data_shape, popargs_to_dict +from hdmf.utils import docval, popargs, get_docval, get_data_shape, popargs_to_dict, AllowPositional from . 
import register_class, CORE_NAMESPACE from .base import TimeSeries @@ -21,7 +21,8 @@ class OpticalChannel(NWBContainer): @docval({'name': 'name', 'type': str, 'doc': 'the name of this electrode'}, # required {'name': 'description', 'type': str, 'doc': 'Any notes or comments about the channel.'}, # required - {'name': 'emission_lambda', 'type': float, 'doc': 'Emission wavelength for channel, in nm.'}) # required + {'name': 'emission_lambda', 'type': float, 'doc': 'Emission wavelength for channel, in nm.'}, # required + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): description, emission_lambda = popargs("description", "emission_lambda", kwargs) super().__init__(**kwargs) @@ -90,7 +91,8 @@ class ImagingPlane(NWBContainer): 'default': None}, {'name': 'grid_spacing_unit', 'type': str, 'doc': "Measurement units for grid_spacing. The default value is 'meters'.", - 'default': 'meters'}) + 'default': 'meters'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): keys_to_set = ('optical_channel', 'description', @@ -188,7 +190,8 @@ class OnePhotonSeries(ImageSeries): "control_description", "device", "offset", - ) + ), + allow_positional=AllowPositional.WARNING, ) def __init__(self, **kwargs): keys_to_set = ( @@ -227,7 +230,8 @@ class TwoPhotonSeries(ImageSeries): 'default': None}, *get_docval(ImageSeries.__init__, 'external_file', 'starting_frame', 'bits_per_pixel', 'dimension', 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate', - 'comments', 'description', 'control', 'control_description', 'device', 'offset')) + 'comments', 'description', 'control', 'control_description', 'device', 'offset'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): keys_to_set = ("field_of_view", "imaging_plane", @@ -260,7 +264,8 @@ class CorrectedImageStack(NWBDataInterface): 'doc': 'Link to image series that is being registered.'}, {'name': 'xy_translation', 'type': TimeSeries, 'doc': 'Stores the x,y delta 
necessary to align each frame to the common coordinates, ' - 'for example, to align each frame to a reference image. This must have the name "xy_translation".'}) + 'for example, to align each frame to a reference image. This must have the name "xy_translation".'}, + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): corrected, original, xy_translation = popargs('corrected', 'original', 'xy_translation', kwargs) super().__init__(**kwargs) @@ -312,7 +317,8 @@ class PlaneSegmentation(DynamicTable): {'name': 'name', 'type': str, 'doc': 'name of PlaneSegmentation.', 'default': None}, {'name': 'reference_images', 'type': (ImageSeries, list, dict, tuple), 'default': None, 'doc': 'One or more image stacks that the masks apply to (can be oneelement stack).'}, - *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) + *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): imaging_plane, reference_images = popargs('imaging_plane', 'reference_images', kwargs) if kwargs['name'] is None: @@ -425,7 +431,8 @@ class RoiResponseSeries(TimeSeries): {'name': 'rois', 'type': DynamicTableRegion, # required 'doc': 'a table region corresponding to the ROIs that were used to generate this data'}, *get_docval(TimeSeries.__init__, 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate', - 'comments', 'description', 'control', 'control_description', 'offset')) + 'comments', 'description', 'control', 'control_description', 'offset'), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): rois = popargs('rois', kwargs) diff --git a/src/pynwb/resources.py b/src/pynwb/resources.py index acdc22b12..1158922bb 100644 --- a/src/pynwb/resources.py +++ b/src/pynwb/resources.py @@ -1,6 +1,6 @@ from hdmf.common import HERD as hdmf_HERD from . 
import get_type_map as tm -from hdmf.utils import docval, get_docval +from hdmf.utils import docval, get_docval, AllowPositional class HERD(hdmf_HERD): @@ -8,7 +8,8 @@ class HERD(hdmf_HERD): HDMF External Resources Data Structure. A table for mapping user terms (i.e. keys) to resource entities. """ - @docval(*get_docval(hdmf_HERD.__init__)) + @docval(*get_docval(hdmf_HERD.__init__), + allow_positional=AllowPositional.WARNING,) def __init__(self, **kwargs): kwargs['type_map'] = tm() super().__init__(**kwargs) diff --git a/tests/integration/hdf5/test_image.py b/tests/integration/hdf5/test_image.py index 4cc731d41..178d92a05 100644 --- a/tests/integration/hdf5/test_image.py +++ b/tests/integration/hdf5/test_image.py @@ -9,7 +9,7 @@ class TestImageSeriesIO(AcquisitionH5IOMixin, TestCase): def setUpContainer(self): """ Return a test ImageSeries to read/write """ - self.dev1 = Device('dev1') + self.dev1 = Device(name='dev1') iS = ImageSeries( name='test_iS', unit='unit', @@ -31,7 +31,7 @@ class TestOpticalSeriesIO(NWBH5IOMixin, TestCase): def setUpContainer(self): """ Return a test OpticalSeries to read/write """ - self.dev1 = Device('dev1') + self.dev1 = Device(name='dev1') self.optical_series = OpticalSeries( name='OpticalSeries', distance=8., diff --git a/tests/integration/hdf5/test_misc.py b/tests/integration/hdf5/test_misc.py index cd9ab1706..fe0ded502 100644 --- a/tests/integration/hdf5/test_misc.py +++ b/tests/integration/hdf5/test_misc.py @@ -75,7 +75,7 @@ class TestUnitsFileIO(NWBH5IOMixin, TestCase): def setUpContainer(self): """ Return placeholder Units object. 
Tested units are added directly to the NWBFile in addContainer """ - return Units('placeholder') # this will get ignored + return Units(name='placeholder') # this will get ignored def addContainer(self, nwbfile): """ Add units to the given NWBFile """ diff --git a/tests/integration/hdf5/test_nwbfile.py b/tests/integration/hdf5/test_nwbfile.py index e164ec649..a9de78e5f 100644 --- a/tests/integration/hdf5/test_nwbfile.py +++ b/tests/integration/hdf5/test_nwbfile.py @@ -261,7 +261,7 @@ class TestEpochsIO(NWBH5IOMixin, TestCase): def setUpContainer(self): """ Return placeholder epochs object. Tested epochs are added directly to the NWBFile in addContainer """ - return TimeIntervals('epochs') + return TimeIntervals(name='epochs') def addContainer(self, nwbfile): """ Add the test epochs to the given NWBFile """ diff --git a/tests/unit/test_ecephys.py b/tests/unit/test_ecephys.py index 1415c3d30..68e1e349f 100644 --- a/tests/unit/test_ecephys.py +++ b/tests/unit/test_ecephys.py @@ -25,8 +25,11 @@ def make_electrode_table(): table = ElectrodeTable() - dev1 = Device('dev1') - group = ElectrodeGroup('tetrode1', 'tetrode description', 'tetrode location', dev1) + dev1 = Device(name='dev1') + group = ElectrodeGroup(name='tetrode1', + description='tetrode description', + location='tetrode location', + device=dev1) table.add_row(location='CA1', group=group, group_name='tetrode1') table.add_row(location='CA1', group=group, group_name='tetrode1') table.add_row(location='CA1', group=group, group_name='tetrode1') @@ -70,9 +73,10 @@ def test_init(self): def test_link(self): table, region = self._create_table_and_region() - ts1 = ElectricalSeries('test_ts1', [0, 1, 2, 3, 4, 5], region, timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) - ts2 = ElectricalSeries('test_ts2', ts1, region, timestamps=ts1) - ts3 = ElectricalSeries('test_ts3', ts2, region, timestamps=ts2) + ts1 = ElectricalSeries(name='test_ts1', data=[0, 1, 2, 3, 4, 5], electrodes=region, + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 
0.5]) + ts2 = ElectricalSeries(name='test_ts2', data=ts1, electrodes=region, timestamps=ts1) + ts3 = ElectricalSeries(name='test_ts3', data=ts2, electrodes=region, timestamps=ts2) self.assertEqual(ts2.data, [0, 1, 2, 3, 4, 5]) self.assertEqual(ts2.timestamps, [0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) self.assertEqual(ts3.data, [0, 1, 2, 3, 4, 5]) @@ -82,7 +86,8 @@ def test_invalid_data_shape(self): table, region = self._create_table_and_region() with self.assertRaisesWith(ValueError, ("ElectricalSeries.__init__: incorrect shape for 'data' (got '(2, 2, 2, " "2)', expected '((None,), (None, None), (None, None, None))')")): - ElectricalSeries('test_ts1', np.ones((2, 2, 2, 2)), region, timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) + ElectricalSeries(name='test_ts1', data=np.ones((2, 2, 2, 2)), electrodes=region, + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) def test_dimensions_warning(self): table, region = self._create_table_and_region() @@ -177,7 +182,7 @@ def test_incorrect_timestamps(self): class ElectrodeGroupConstructor(TestCase): def test_init(self): - dev1 = Device('dev1') + dev1 = Device(name='dev1') group = ElectrodeGroup(name='elec1', description='electrode description', location='electrode location', @@ -191,9 +196,12 @@ def test_init(self): def test_init_position_array(self): position = np.array((1, 2, 3), dtype=np.dtype([('x', float), ('y', float), ('z', float)])) - dev1 = Device('dev1') - group = ElectrodeGroup('elec1', 'electrode description', 'electrode location', dev1, - position) + dev1 = Device(name='dev1') + group = ElectrodeGroup(name='elec1', + description='electrode description', + location='electrode location', + device=dev1, + position=position) self.assertEqual(group.name, 'elec1') self.assertEqual(group.description, 'electrode description') self.assertEqual(group.location, 'electrode location') @@ -201,7 +209,7 @@ def test_init_position_array(self): self.assertEqual(group.position, position) def test_init_position_none(self): - dev1 = Device('dev1') + 
dev1 = Device(name='dev1') group = ElectrodeGroup(name='elec1', description='electrode description', location='electrode location', @@ -213,7 +221,7 @@ def test_init_position_none(self): self.assertIsNone(group.position) def test_init_position_bad(self): - dev1 = Device('dev1') + dev1 = Device(name='dev1') with self.assertRaises(ValueError): ElectrodeGroup(name='elec1', description='electrode description', @@ -256,8 +264,11 @@ def test_init(self): data = list(range(10)) ts = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0] table, region = self._create_table_and_region() - eS = ElectricalSeries('test_eS', data, region, timestamps=ts) - eD = EventDetection('detection_method', eS, (1, 2, 3), (0.1, 0.2, 0.3)) + eS = ElectricalSeries(name='test_eS', data=data, electrodes=region, timestamps=ts) + eD = EventDetection(detection_method='detection_method', + source_electricalseries=eS, + source_idx=(1, 2, 3), + times=(0.1, 0.2, 0.3)) self.assertEqual(eD.detection_method, 'detection_method') self.assertEqual(eD.source_electricalseries, eS) self.assertEqual(eD.source_idx, (1, 2, 3)) @@ -279,7 +290,10 @@ def _create_table_and_region(self): def test_init(self): table, region = self._create_table_and_region() - sES = SpikeEventSeries('test_sES', list(range(10)), list(range(10)), region) + sES = SpikeEventSeries(name='test_sES', + data=list(range(10)), + timestamps=list(range(10)), + electrodes=region) pm = ProcessingModule(name='test_module', description='a test module') ew = EventWaveform() @@ -344,7 +358,8 @@ def _create_table_and_region(self): def test_init(self): _, region = self._create_table_and_region() - eS = ElectricalSeries('test_eS', [0, 1, 2, 3], region, timestamps=[0.1, 0.2, 0.3, 0.4]) + eS = ElectricalSeries(name='test_eS', data=[0, 1, 2, 3], + electrodes=region, timestamps=[0.1, 0.2, 0.3, 0.4]) msg = ( "The linked table for DynamicTableRegion 'electrodes' does not share " "an ancestor with the DynamicTableRegion." 
@@ -357,7 +372,10 @@ def test_init(self): def test_add_electrical_series(self): lfp = LFP() table, region = self._create_table_and_region() - eS = ElectricalSeries('test_eS', [0, 1, 2, 3], region, timestamps=[0.1, 0.2, 0.3, 0.4]) + eS = ElectricalSeries(name='test_eS', + data=[0, 1, 2, 3], + electrodes=region, + timestamps=[0.1, 0.2, 0.3, 0.4]) pm = ProcessingModule(name='test_module', description='a test module') pm.add(table) pm.add(lfp) @@ -379,7 +397,10 @@ def _create_table_and_region(self): def test_init(self): _, region = self._create_table_and_region() - eS = ElectricalSeries('test_eS', [0, 1, 2, 3], region, timestamps=[0.1, 0.2, 0.3, 0.4]) + eS = ElectricalSeries(name='test_eS', + data=[0, 1, 2, 3], + electrodes=region, + timestamps=[0.1, 0.2, 0.3, 0.4]) msg = ( "The linked table for DynamicTableRegion 'electrodes' does not share " "an ancestor with the DynamicTableRegion." @@ -391,7 +412,10 @@ def test_init(self): def test_add_electrical_series(self): table, region = self._create_table_and_region() - eS = ElectricalSeries('test_eS', [0, 1, 2, 3], region, timestamps=[0.1, 0.2, 0.3, 0.4]) + eS = ElectricalSeries(name='test_eS', + data=[0, 1, 2, 3], + electrodes=region, + timestamps=[0.1, 0.2, 0.3, 0.4]) pm = ProcessingModule(name='test_module', description='a test module') fe = FilteredEphys() pm.add(table) @@ -418,7 +442,10 @@ def test_init(self): table, region = self._create_table_and_region() description = ['desc1', 'desc2', 'desc3'] features = [[[0, 1, 2], [3, 4, 5]], [[6, 7, 8], [9, 10, 11]]] - fe = FeatureExtraction(region, description, event_times, features) + fe = FeatureExtraction(electrodes=region, + description=description, + times=event_times, + features=features) self.assertEqual(fe.description, description) self.assertEqual(fe.times, event_times) self.assertEqual(fe.features, features) @@ -428,7 +455,11 @@ def test_invalid_init_mismatched_event_times(self): table, region = self._create_table_and_region() description = ['desc1', 'desc2', 
'desc3'] features = [[[0, 1, 2], [3, 4, 5]]] - self.assertRaises(ValueError, FeatureExtraction, region, description, event_times, features) + with self.assertRaises(ValueError): + FeatureExtraction(electrodes=region, + description=description, + times=event_times, + features=features) def test_invalid_init_mismatched_electrodes(self): event_times = [1] @@ -436,18 +467,30 @@ def test_invalid_init_mismatched_electrodes(self): region = DynamicTableRegion(name='electrodes', data=[0], description='the first electrode', table=table) description = ['desc1', 'desc2', 'desc3'] features = [[[0, 1, 2], [3, 4, 5]]] - self.assertRaises(ValueError, FeatureExtraction, region, description, event_times, features) + with self.assertRaises(ValueError): + FeatureExtraction(electrodes=region, + description=description, + times=event_times, + features=features) def test_invalid_init_mismatched_description(self): event_times = [1] table, region = self._create_table_and_region() description = ['desc1', 'desc2', 'desc3', 'desc4'] # Need 3 descriptions but give 4 features = [[[0, 1, 2], [3, 4, 5]]] - self.assertRaises(ValueError, FeatureExtraction, region, description, event_times, features) + with self.assertRaises(ValueError): + FeatureExtraction(electrodes=region, + description=description, + times=event_times, + features=features) def test_invalid_init_mismatched_description2(self): event_times = [1] table, region = self._create_table_and_region() description = ['desc1', 'desc2', 'desc3'] features = [[0, 1, 2], [3, 4, 5]] # Need 3D feature array but give only 2D array - self.assertRaises(ValueError, FeatureExtraction, region, description, event_times, features) + with self.assertRaises(ValueError): + FeatureExtraction(electrodes=region, + description=description, + times=event_times, + features=features) diff --git a/tests/unit/test_epoch_legacy.py b/tests/unit/test_epoch_legacy.py index 1f6c50f38..eebc35198 100644 --- a/tests/unit/test_epoch_legacy.py +++ 
b/tests/unit/test_epoch_legacy.py @@ -21,7 +21,7 @@ class TestTimeIntervalsIO(NWBH5IOMixin, TestCase): def setUpContainer(self): """ Return placeholder epochs object. Tested epochs are added directly to the NWBFile in addContainer """ - return TimeIntervals('epochs') + return TimeIntervals(name='epochs') def addContainer(self, nwbfile): """ Add the test epochs to the given NWBFile """ @@ -62,7 +62,8 @@ def getContainer(self, nwbfile): def test_legacy_format(self): description = 'a file to test writing and reading a %s' % self.container_type identifier = 'TEST_%s' % self.container_type - nwbfile = NWBFile(description, identifier, self.start_time, file_create_date=self.create_date) + nwbfile = NWBFile(session_description=description, identifier=identifier, + session_start_time=self.start_time, file_create_date=self.create_date) self.addContainer(nwbfile) # write the file diff --git a/tests/unit/test_file.py b/tests/unit/test_file.py index 8cb3415d9..aba85a355 100644 --- a/tests/unit/test_file.py +++ b/tests/unit/test_file.py @@ -135,7 +135,10 @@ def test_epoch_tags(self): tags1 = ['t1', 't2'] tags2 = ['t3', 't4'] tstamps = np.arange(1.0, 100.0, 0.1, dtype=np.float64) - ts = TimeSeries("test_ts", list(range(len(tstamps))), 'unit', timestamps=tstamps) + ts = TimeSeries(name="test_ts", + data=list(range(len(tstamps))), + unit='unit', + timestamps=tstamps) expected_tags = tags1 + tags2 self.nwbfile.add_epoch(0.0, 1.0, tags1, ts) self.nwbfile.add_epoch(0.0, 1.0, tags2, ts) @@ -155,13 +158,17 @@ def test_epoch_tags_no_table(self): self.assertEqual(set(), self.nwbfile.epoch_tags) def test_add_acquisition(self): - self.nwbfile.add_acquisition(TimeSeries('test_ts', [0, 1, 2, 3, 4, 5], - 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) + self.nwbfile.add_acquisition(TimeSeries(name='test_ts', + data=[0, 1, 2, 3, 4, 5], + unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) self.assertEqual(len(self.nwbfile.acquisition), 1) def test_add_stimulus(self): - 
self.nwbfile.add_stimulus(TimeSeries('test_ts', [0, 1, 2, 3, 4, 5], - 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) + self.nwbfile.add_stimulus(TimeSeries(name='test_ts', + data=[0, 1, 2, 3, 4, 5], + unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) self.assertEqual(len(self.nwbfile.stimulus), 1) def test_add_stimulus_timeseries_arg(self): @@ -202,8 +209,10 @@ def test_add_stimulus_dynamic_table(self): self.assertIs(self.nwbfile.stimulus['test_dynamic_table'], dt) def test_add_stimulus_template(self): - self.nwbfile.add_stimulus_template(TimeSeries('test_ts', [0, 1, 2, 3, 4, 5], - 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) + self.nwbfile.add_stimulus_template(TimeSeries(name='test_ts', + data=[0, 1, 2, 3, 4, 5], + unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) self.assertEqual(len(self.nwbfile.stimulus_template), 1) def test_add_stimulus_template_images(self): @@ -213,33 +222,45 @@ def test_add_stimulus_template_images(self): self.assertEqual(len(self.nwbfile.stimulus_template), 1) def test_add_analysis(self): - self.nwbfile.add_analysis(TimeSeries('test_ts', [0, 1, 2, 3, 4, 5], - 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) + self.nwbfile.add_analysis(TimeSeries(name='test_ts', + data=[0, 1, 2, 3, 4, 5], + unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) self.assertEqual(len(self.nwbfile.analysis), 1) def test_add_acquisition_check_dups(self): - self.nwbfile.add_acquisition(TimeSeries('test_ts', [0, 1, 2, 3, 4, 5], - 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) + self.nwbfile.add_acquisition(TimeSeries(name='test_ts', + data=[0, 1, 2, 3, 4, 5], + unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) with self.assertRaises(ValueError): - self.nwbfile.add_acquisition(TimeSeries('test_ts', [0, 1, 2, 3, 4, 5], - 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) + self.nwbfile.add_acquisition(TimeSeries(name='test_ts', + data=[0, 1, 2, 3, 4, 5], + unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 
0.4, 0.5])) def test_get_acquisition_empty(self): with self.assertRaisesWith(ValueError, "acquisition of NWBFile 'root' is empty."): self.nwbfile.get_acquisition() def test_get_acquisition_multiple_elements(self): - self.nwbfile.add_acquisition(TimeSeries('test_ts1', [0, 1, 2, 3, 4, 5], - 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) - self.nwbfile.add_acquisition(TimeSeries('test_ts2', [0, 1, 2, 3, 4, 5], - 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) + self.nwbfile.add_acquisition(TimeSeries(name='test_ts1', + data=[0, 1, 2, 3, 4, 5], + unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) + self.nwbfile.add_acquisition(TimeSeries(name='test_ts2', + data=[0, 1, 2, 3, 4, 5], + unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) msg = "More than one element in acquisition of NWBFile 'root' -- must specify a name." with self.assertRaisesWith(ValueError, msg): self.nwbfile.get_acquisition() def test_add_acquisition_invalid_name(self): - self.nwbfile.add_acquisition(TimeSeries('test_ts', [0, 1, 2, 3, 4, 5], - 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) + self.nwbfile.add_acquisition(TimeSeries(name='test_ts', + data=[0, 1, 2, 3, 4, 5], + unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5])) msg = "\"'TEST_TS' not found in acquisition of NWBFile 'root'.\"" with self.assertRaisesWith(KeyError, msg): self.nwbfile.get_acquisition("TEST_TS") @@ -406,8 +427,10 @@ def test_add_electrode_missing_group(self): nwbfile.add_electrode(location='a', id=0) def test_all_children(self): - ts1 = TimeSeries('test_ts1', [0, 1, 2, 3, 4, 5], 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) - ts2 = TimeSeries('test_ts2', [0, 1, 2, 3, 4, 5], 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) + ts1 = TimeSeries(name='test_ts1', data=[0, 1, 2, 3, 4, 5], unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) + ts2 = TimeSeries(name='test_ts2', data=[0, 1, 2, 3, 4, 5], unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) 
self.nwbfile.add_acquisition(ts1) self.nwbfile.add_acquisition(ts2) name = 'example_electrode_group' @@ -429,8 +452,10 @@ def test_fail_if_source_script_file_name_without_source_script(self): source_script_file_name='nofilename') def test_get_neurodata_type(self): - ts1 = TimeSeries('test_ts1', [0, 1, 2, 3, 4, 5], 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) - ts2 = TimeSeries('test_ts2', [0, 1, 2, 3, 4, 5], 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) + ts1 = TimeSeries(name='test_ts1', data=[0, 1, 2, 3, 4, 5], unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) + ts2 = TimeSeries(name='test_ts2', data=[0, 1, 2, 3, 4, 5], unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) self.nwbfile.add_acquisition(ts1) self.nwbfile.add_acquisition(ts2) p1 = ts1.get_ancestor(neurodata_type='NWBFile') @@ -462,8 +487,9 @@ def test_copy(self): self.nwbfile.add_electrode(x=2.0, location='b', group=elecgrp) elec_region = self.nwbfile.create_electrode_table_region([1], 'name') - ts1 = TimeSeries('test_ts1', [0, 1, 2, 3, 4, 5], 'grams', timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) - ts2 = ElectricalSeries('test_ts2', [0, 1, 2, 3, 4, 5], + ts1 = TimeSeries(name='test_ts1', data=[0, 1, 2, 3, 4, 5], unit='grams', + timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) + ts2 = ElectricalSeries(name='test_ts2', data=[0, 1, 2, 3, 4, 5], electrodes=elec_region, timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5]) self.nwbfile.add_acquisition(ts1) self.nwbfile.add_acquisition(ts2) diff --git a/tests/unit/test_icephys.py b/tests/unit/test_icephys.py index e0e8332f9..3a9db9e61 100644 --- a/tests/unit/test_icephys.py +++ b/tests/unit/test_icephys.py @@ -120,16 +120,16 @@ class IntracellularElectrodeConstructor(TestCase): def test_constructor(self): device = Device(name='device_name') - elec = IntracellularElectrode('test_iS', - device, - 'description', - 'slice', - 'seal', - 'location', - 'resistance', - 'filtering', - 'initial_access_resistance', - 'this_cell') + elec = 
IntracellularElectrode(name='test_iS', + device=device, + description='description', + slice='slice', + seal='seal', + location='location', + resistance='resistance', + filtering='filtering', + initial_access_resistance='initial_access_resistance', + cell_id='this_cell') self.assertEqual(elec.name, 'test_iS') self.assertEqual(elec.device, device) self.assertEqual(elec.description, 'description') @@ -147,8 +147,12 @@ class PatchClampSeriesConstructor(TestCase): def test_default(self): electrode_name = GetElectrode() - pCS = PatchClampSeries('test_pCS', list(), 'unit', - electrode_name, 1.0, timestamps=list()) + pCS = PatchClampSeries(name='test_pCS', + data=list(), + unit='unit', + electrode=electrode_name, + gain=1.0, + timestamps=list()) self.assertEqual(pCS.name, 'test_pCS') self.assertEqual(pCS.unit, 'unit') self.assertEqual(pCS.electrode, electrode_name) @@ -157,8 +161,13 @@ def test_default(self): def test_sweepNumber_valid(self): electrode_name = GetElectrode() - pCS = PatchClampSeries('test_pCS', list(), 'unit', - electrode_name, 1.0, timestamps=list(), sweep_number=4711) + pCS = PatchClampSeries(name='test_pCS', + data=list(), + unit='unit', + electrode=electrode_name, + gain=1.0, + timestamps=list(), + sweep_number=4711) self.assertEqual(pCS.name, 'test_pCS') self.assertEqual(pCS.unit, 'unit') self.assertEqual(pCS.electrode, electrode_name) @@ -168,8 +177,13 @@ def test_sweepNumber_valid(self): def test_sweepNumber_valid_np(self): electrode_name = GetElectrode() - pCS = PatchClampSeries('test_pCS', list(), 'unit', - electrode_name, 1.0, timestamps=list(), sweep_number=1) + pCS = PatchClampSeries(name='test_pCS', + data=list(), + unit='unit', + electrode=electrode_name, + gain=1.0, + timestamps=list(), + sweep_number=1) self.assertEqual(pCS.name, 'test_pCS') self.assertEqual(pCS.unit, 'unit') self.assertEqual(pCS.electrode, electrode_name) @@ -179,8 +193,13 @@ def test_sweepNumber_valid_np(self): def test_sweepNumber_large_and_valid(self): electrode_name = 
GetElectrode() - pCS = PatchClampSeries('test_pCS', list(), 'unit', - electrode_name, 1.0, timestamps=list(), sweep_number=np.uint64(2**63-1)) + pCS = PatchClampSeries(name='test_pCS', + data=list(), + unit='unit', + electrode=electrode_name, + gain=1.0, + timestamps=list(), + sweep_number=np.uint64(2**63-1)) self.assertEqual(pCS.name, 'test_pCS') self.assertEqual(pCS.unit, 'unit') self.assertEqual(pCS.electrode, electrode_name) @@ -191,22 +210,37 @@ def test_sweepNumber_throws_with_negative(self): electrode_name = GetElectrode() with self.assertRaises(ValueError): - PatchClampSeries('test_pCS', list(), 'unit', - electrode_name, 1.0, timestamps=list(), sweep_number=-1) + PatchClampSeries(name='test_pCS', + data=list(), + unit='unit', + electrode=electrode_name, + gain=1.0, + timestamps=list(), + sweep_number=-1) def test_sweepNumber_throws_with_NaN(self): electrode_name = GetElectrode() with self.assertRaises(TypeError): - PatchClampSeries('test_pCS', list(), 'unit', - electrode_name, 1.0, timestamps=list(), sweep_number=float('nan')) + PatchClampSeries(name='test_pCS', + data=list(), + unit='unit', + electrodes=electrode_name, + gain=1.0, + timestamps=list(), + sweep_number=float('nan')) def test_sweepNumber_throws_with_Float(self): electrode_name = GetElectrode() with self.assertRaises(TypeError): - PatchClampSeries('test_pCS', list(), 'unit', - electrode_name, 1.0, timestamps=list(), sweep_number=1.5) + PatchClampSeries(name='test_pCS', + data=list(), + unit='unit', + electrodes=electrode_name, + gain=1.0, + timestamps=list(), + sweep_number=1.5) def test_data_shape(self): electrode_name = GetElectrode() @@ -227,7 +261,14 @@ class CurrentClampSeriesConstructor(TestCase): def test_init(self): electrode_name = GetElectrode() - cCS = CurrentClampSeries('test_cCS', list(), electrode_name, 1.0, "stimset", 2.0, 3.0, 4.0, + cCS = CurrentClampSeries(name='test_cCS', + data=list(), + electrode=electrode_name, + gain=1.0, + stimulus_description="stimset", + 
bias_current=2.0, + bridge_balance=3.0, + capacitance_compensation=4.0, timestamps=list()) self.assertEqual(cCS.name, 'test_cCS') self.assertEqual(cCS.unit, 'volts') @@ -253,7 +294,11 @@ class IZeroClampSeriesConstructor(TestCase): def test_init(self): electrode_name = GetElectrode() - iZCS = IZeroClampSeries('test_iZCS', list(), electrode_name, 1.0, timestamps=list()) + iZCS = IZeroClampSeries(name='test_iZCS', + data=list(), + electrode=electrode_name, + gain=1.0, + timestamps=list()) self.assertEqual(iZCS.name, 'test_iZCS') self.assertEqual(iZCS.unit, 'volts') self.assertEqual(iZCS.electrode, electrode_name) @@ -287,7 +332,11 @@ class CurrentClampStimulusSeriesConstructor(TestCase): def test_init(self): electrode_name = GetElectrode() - cCSS = CurrentClampStimulusSeries('test_cCSS', list(), electrode_name, 1.0, timestamps=list()) + cCSS = CurrentClampStimulusSeries(name='test_cCSS', + data=list(), + electrode=electrode_name, + gain=1.0, + timestamps=list()) self.assertEqual(cCSS.name, 'test_cCSS') self.assertEqual(cCSS.unit, 'amperes') self.assertEqual(cCSS.electrode, electrode_name) @@ -308,8 +357,19 @@ class VoltageClampSeriesConstructor(TestCase): def test_init(self): electrode_name = GetElectrode() - vCS = VoltageClampSeries('test_vCS', list(), electrode_name, - 1.0, "stimset", 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, timestamps=list()) + vCS = VoltageClampSeries(name='test_vCS', + data=list(), + electrode=electrode_name, + gain=1.0, + stimulus_description="stimset", + capacitance_fast=2.0, + capacitance_slow=3.0, + resistance_comp_bandwidth=4.0, + resistance_comp_correction=5.0, + resistance_comp_prediction=6.0, + whole_cell_capacitance_comp=7.0, + whole_cell_series_resistance_comp=8.0, + timestamps=list()) self.assertEqual(vCS.name, 'test_vCS') self.assertEqual(vCS.unit, 'amperes') self.assertEqual(vCS.electrode, electrode_name) @@ -329,8 +389,20 @@ def test_unit_warning(self): msg = "Unit 'unit' for VoltageClampSeries 'test_vCS' is ignored and will be set " \ 
"to 'amperes' as per NWB 2.1.0." with self.assertWarnsWith(UserWarning, msg): - vCS = VoltageClampSeries('test_vCS', list(), electrode_name, - 1.0, "stimset", 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, timestamps=list(), unit='unit') + vCS = VoltageClampSeries(name='test_vCS', + data=list(), + electrode=electrode_name, + gain=1.0, + stimulus_description="stimset", + capacitance_fast=2.0, + capacitance_slow=3.0, + resistance_comp_bandwidth=4.0, + resistance_comp_correction=5.0, + resistance_comp_prediction=6.0, + whole_cell_capacitance_comp=7.0, + whole_cell_series_resistance_comp=8.0, + timestamps=list(), + unit='unit') self.assertEqual(vCS.unit, 'amperes') @@ -339,7 +411,8 @@ class VoltageClampStimulusSeriesConstructor(TestCase): def test_init(self): electrode_name = GetElectrode() - vCSS = VoltageClampStimulusSeries('test_vCSS', list(), electrode_name, 1.0, timestamps=list()) + vCSS = VoltageClampStimulusSeries(name='test_vCSS', data=list(), electrode=electrode_name, gain=1.0, + timestamps=list()) self.assertEqual(vCSS.name, 'test_vCSS') self.assertEqual(vCSS.unit, 'volts') self.assertEqual(vCSS.electrode, electrode_name) diff --git a/tests/unit/test_icephys_metadata_tables.py b/tests/unit/test_icephys_metadata_tables.py index b31ee9215..7ed9c7432 100644 --- a/tests/unit/test_icephys_metadata_tables.py +++ b/tests/unit/test_icephys_metadata_tables.py @@ -868,7 +868,7 @@ def test_basic_write(self): sw = SimultaneousRecordingsTable(intracellular_recordings_table=ir) row_index = sw.add_simultaneous_recording(recordings=[0]) self.assertEqual(row_index, 0) - sws = SequentialRecordingsTable(sw) + sws = SequentialRecordingsTable(simultaneous_recordings_table=sw) row_index = sws.add_sequential_recording(simultaneous_recordings=[0, ], stimulus_type='MyStimStype') self.assertEqual(row_index, 0) self.write_test_helper(ir=ir, sw=sw, sws=sws) @@ -886,7 +886,7 @@ def test_enforce_unique_id(self): ) sw = SimultaneousRecordingsTable(intracellular_recordings_table=ir) 
sw.add_simultaneous_recording(recordings=[0]) - sws = SequentialRecordingsTable(sw) + sws = SequentialRecordingsTable(simultaneous_recordings_table=sw) sws.add_sequential_recording(simultaneous_recordings=[0, ], id=np.int64(10), stimulus_type='MyStimStype') with self.assertRaises(ValueError): sws.add_sequential_recording(simultaneous_recordings=[0, ], id=np.int64(10), stimulus_type='MyStimStype') @@ -932,7 +932,7 @@ def test_basic_write(self): sw = SimultaneousRecordingsTable(intracellular_recordings_table=ir) row_index = sw.add_simultaneous_recording(recordings=[0]) self.assertEqual(row_index, 0) - sws = SequentialRecordingsTable(sw) + sws = SequentialRecordingsTable(simultaneous_recordings_table=sw) row_index = sws.add_sequential_recording(simultaneous_recordings=[0, ], stimulus_type='MyStimStype') self.assertEqual(row_index, 0) repetitions = RepetitionsTable(sequential_recordings_table=sws) @@ -952,7 +952,7 @@ def test_enforce_unique_id(self): ) sw = SimultaneousRecordingsTable(intracellular_recordings_table=ir) sw.add_simultaneous_recording(recordings=[0]) - sws = SequentialRecordingsTable(sw) + sws = SequentialRecordingsTable(simultaneous_recordings_table=sw) sws.add_sequential_recording(simultaneous_recordings=[0, ], stimulus_type='MyStimStype') repetitions = RepetitionsTable(sequential_recordings_table=sws) repetitions.add_repetition(sequential_recordings=[0, ], id=np.int64(10)) @@ -999,7 +999,7 @@ def test_basic_write(self): sw = SimultaneousRecordingsTable(intracellular_recordings_table=ir) row_index = sw.add_simultaneous_recording(recordings=[0]) self.assertEqual(row_index, 0) - sws = SequentialRecordingsTable(sw) + sws = SequentialRecordingsTable(simultaneous_recordings_table=sw) row_index = sws.add_sequential_recording(simultaneous_recordings=[0, ], stimulus_type='MyStimStype') self.assertEqual(row_index, 0) repetitions = RepetitionsTable(sequential_recordings_table=sws) @@ -1021,7 +1021,7 @@ def test_enforce_unique_id(self): id=np.int64(10)) sw = 
SimultaneousRecordingsTable(intracellular_recordings_table=ir) sw.add_simultaneous_recording(recordings=[0]) - sws = SequentialRecordingsTable(sw) + sws = SequentialRecordingsTable(simultaneous_recordings_table=sw) sws.add_sequential_recording(simultaneous_recordings=[0, ], stimulus_type='MyStimStype') repetitions = RepetitionsTable(sequential_recordings_table=sws) repetitions.add_repetition(sequential_recordings=[0, ]) diff --git a/tests/unit/test_image.py b/tests/unit/test_image.py index c279101f5..184ad6edd 100644 --- a/tests/unit/test_image.py +++ b/tests/unit/test_image.py @@ -20,7 +20,7 @@ class ImageSeriesConstructor(TestCase): def test_init(self): - dev = Device('test_device') + dev = Device(name='test_device') iS = ImageSeries( name='test_iS', unit='unit', @@ -411,4 +411,4 @@ def test_rgb_image(self): RGBImage(name='test_rgb_image', data=np.ones((2, 2, 3))) def test_rgba_image(self): - RGBAImage('test_rgba_image', np.ones((2, 2, 4))) + RGBAImage(name='test_rgba_image', data=np.ones((2, 2, 4))) diff --git a/tests/unit/test_misc.py b/tests/unit/test_misc.py index 9350d1d2e..173ac9fdf 100644 --- a/tests/unit/test_misc.py +++ b/tests/unit/test_misc.py @@ -11,14 +11,17 @@ class AnnotationSeriesConstructor(TestCase): def test_init(self): - aS = AnnotationSeries('test_aS', data=[1, 2, 3], timestamps=[1., 2., 3.]) + aS = AnnotationSeries(name='test_aS', data=[1, 2, 3], timestamps=[1., 2., 3.]) self.assertEqual(aS.name, 'test_aS') aS.add_annotation(2.0, 'comment') class AbstractFeatureSeriesConstructor(TestCase): def test_init(self): - aFS = AbstractFeatureSeries('test_aFS', ['feature units'], ['features'], timestamps=list()) + aFS = AbstractFeatureSeries(name='test_aFS', + feature_units=['feature units'], + features=['features'], + timestamps=list()) self.assertEqual(aFS.name, 'test_aFS') self.assertEqual(aFS.feature_units, ['feature units']) self.assertEqual(aFS.features, ['features']) @@ -115,7 +118,7 @@ class IntervalSeriesConstructor(TestCase): def 
test_init(self): data = [1.0, -1.0, 1.0, -1.0] timestamps = [0.0, 1.0, 2.0, 3.0] - iS = IntervalSeries('test_iS', data=data, timestamps=timestamps) + iS = IntervalSeries(name='test_iS', data=data, timestamps=timestamps) self.assertEqual(iS.name, 'test_iS') self.assertEqual(iS.data, data) self.assertEqual(iS.timestamps, timestamps) @@ -123,7 +126,7 @@ def test_init(self): def test_add_interval(self): data = [1.0, -1.0, 1.0, -1.0] timestamps = [0.0, 1.0, 2.0, 3.0] - iS = IntervalSeries('test_iS', data=data, timestamps=timestamps) + iS = IntervalSeries(name='test_iS', data=data, timestamps=timestamps) iS.add_interval(4.0, 5.0) data.append(1.0) data.append(-1.0) @@ -247,8 +250,11 @@ def test_times_and_intervals(self): def test_electrode_group(self): ut = Units() - device = Device('test_device') - electrode_group = ElectrodeGroup('test_electrode_group', 'description', 'location', device) + device = Device(name='test_device') + electrode_group = ElectrodeGroup(name='test_electrode_group', + description='description', + location='location', + device=device) ut.add_unit(electrode_group=electrode_group) self.assertEqual(ut['electrode_group'][0], electrode_group) diff --git a/tests/unit/test_ogen.py b/tests/unit/test_ogen.py index 678f284dc..15e36bbf7 100644 --- a/tests/unit/test_ogen.py +++ b/tests/unit/test_ogen.py @@ -6,7 +6,7 @@ class OptogeneticSeriesConstructor(TestCase): def test_init(self): - device = Device('name') + device = Device(name='name') oS = OptogeneticStimulusSite( name='site1', device=device, From b653c9c81e0715ecbe27efa0151f588e6c7423da Mon Sep 17 00:00:00 2001 From: Steph Prince <40640337+stephprince@users.noreply.github.com> Date: Mon, 23 Dec 2024 12:45:21 -0500 Subject: [PATCH 3/8] update test to use kwargs --- tests/unit/test_icephys.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_icephys.py b/tests/unit/test_icephys.py index 10974b446..642aa5411 100644 --- a/tests/unit/test_icephys.py +++ 
b/tests/unit/test_icephys.py @@ -161,8 +161,11 @@ def test_default(self): def test_gain_optional(self): electrode_name = GetElectrode() - pCS = PatchClampSeries('test_pCS', list(), 'unit', - electrode_name, timestamps=list()) + pCS = PatchClampSeries(name='test_pCS', + data=list(), + unit='unit', + electrode=electrode_name, + timestamps=list()) self.assertIsNone(pCS.gain) def test_sweepNumber_valid(self): From e47cd5a55f22dd1d35077a40848580d8d6542c65 Mon Sep 17 00:00:00 2001 From: Steph Prince <40640337+stephprince@users.noreply.github.com> Date: Mon, 23 Dec 2024 10:18:58 -0800 Subject: [PATCH 4/8] Update deprecations (#2011) * update deprecated classes and methods * update deprecation warnings * update tests for deprecated classes and functions * fix warnings and formatting * add scratch tests and deprecations * keep function behavior for deprecation warnings * update CHANGELOG.md * add tests for deprecations * update scratch tutorial * skip deprecated gallery examples * update CHANGELOG.md * remove old icephys tutorial * add pynwb 4.0 deprecation info * add error on new pass on construct method * remove warnings on construct for deprecations * fix formatting * update hdmf-zarr url * Apply suggestions from code review Co-authored-by: Ryan Ly * Update src/pynwb/core.py Co-authored-by: Ryan Ly * add code review suggestions * add catch for extra warning * remove unused variable --------- Co-authored-by: Ryan Ly --- CHANGELOG.md | 21 ++ docs/gallery/domain/icephys.py | 239 --------------------- docs/gallery/general/scratch.py | 2 +- docs/source/conf.py | 4 +- src/pynwb/__init__.py | 6 - src/pynwb/base.py | 9 +- src/pynwb/core.py | 36 ++-- src/pynwb/ecephys.py | 8 +- src/pynwb/file.py | 118 ++++------ src/pynwb/icephys.py | 8 +- src/pynwb/image.py | 41 ++-- src/pynwb/ophys.py | 12 +- test.py | 3 +- tests/back_compat/test_import_structure.py | 1 - tests/integration/hdf5/test_ecephys.py | 60 ++++-- tests/integration/hdf5/test_icephys.py | 64 +++--- 
tests/unit/test_base.py | 28 +-- tests/unit/test_ecephys.py | 42 +++- tests/unit/test_file.py | 44 ++-- tests/unit/test_icephys.py | 80 +++---- tests/unit/test_icephys_metadata_tables.py | 164 +++++++++----- tests/unit/test_image.py | 88 ++++++-- tests/unit/test_ophys.py | 51 +++-- tests/unit/test_scratch.py | 60 ++++++ 24 files changed, 560 insertions(+), 629 deletions(-) delete mode 100644 docs/gallery/domain/icephys.py diff --git a/CHANGELOG.md b/CHANGELOG.md index a7320703f..503df39e6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,27 @@ ## PyNWB 3.0.0 (Upcoming) +### Deprecations +- The following deprecated classes will now raise errors when creating new instances of these classes: ``ClusterWaveforms``, ``Clustering``, ``SweepTable``. Reading files using these data types will continue to be supported. +- The following methods and arguments have been deprecated: + - ``ProcessingModule.add_container`` and ``ProcessingModule.add_data_interface`` are replaced by ``ProcessingModule.add`` + - ``ProcessingModule.get_container`` and ``ProcessingModule.get_data_interface`` are replaced by ``ProcessingModule.get`` + - ``ScratchData.notes`` is deprecated. Use ``ScratchData.description`` instead. + - ``NWBFile.ic_electrodes`` is deprecated. Use ``NWBFile.icephys_electrodes`` instead. + - ``NWBFile.ec_electrodes`` is deprecated. Use ``NWBFile.electrodes`` instead. + - ``NWBFile.icephys_filtering`` is deprecated. Use ``IntracellularElectrode.filtering`` instead. + - ``NWBFile.modules`` is deprecated. Use ``NWBFile.processing`` instead. + - ``ImageSeries.format`` is fixed to 'external' if an external file is provided. + - ``ImageSeries.bits_per_pixel`` is deprecated. + - ``ImagingPlane.manifold``, ``ImagingPlane.conversion`` and ``ImagingPlane.unit`` are deprecated. Use ``ImagingPlane.origin_coords`` and ``ImagingPlane.grid_spacing`` instead. + - ``IndexSeries.unit`` is fixed to "N/A". + - ``IndexSeries.indexed_timeseries`` is deprecated.
Use ``IndexSeries.indexed_images`` instead. +- The following deprecated methods have been removed: + - ``NWBFile.add_ic_electrode`` is removed. Use ``NWBFile.add_icephys_electrode`` instead. + - ``NWBFile.create_ic_electrode`` is removed. Use ``NWBFile.create_icephys_electrode`` instead. + - ``NWBFile.get_ic_electrode`` is removed. Use ``NWBFile.get_icephys_electrode`` instead. + - ``pynwb._get_resources`` is removed. + ### Enhancements and minor changes - Added support for NWB schema 2.8.0. @rly [#2001](https://github.com/NeurodataWithoutBorders/pynwb/pull/2001) - Removed `SpatialSeries.bounds` field that was not functional. This will be fixed in a future release. @rly [#1907](https://github.com/NeurodataWithoutBorders/pynwb/pull/1907), [#1996](https://github.com/NeurodataWithoutBorders/pynwb/pull/1996) diff --git a/docs/gallery/domain/icephys.py b/docs/gallery/domain/icephys.py deleted file mode 100644 index 04eae6bed..000000000 --- a/docs/gallery/domain/icephys.py +++ /dev/null @@ -1,239 +0,0 @@ -# -*- coding: utf-8 -*- -""" -.. _icephys_tutorial: - -Intracellular Electrophysiology Data using SweepTable -===================================================== - -The following tutorial describes storage of intracellular electrophysiology data in NWB using the -SweepTable to manage recordings. - -.. warning:: - The use of SweepTable has been deprecated as of PyNWB >v2.0 in favor of the new hierarchical - intracellular electrophysiology metadata tables to allow for a more complete description of - intracellular electrophysiology experiments. See the :doc:`Intracellular electrophysiology ` - tutorial for details. -""" - -####################### -# Creating and Writing NWB files -# ------------------------------ -# -# When creating a NWB file, the first step is to create the :py:class:`~pynwb.file.NWBFile`. The first -# argument is is a brief description of the dataset. 
- -# sphinx_gallery_thumbnail_path = 'figures/gallery_thumbnails_icephys_sweeptable.png' -from datetime import datetime -from uuid import uuid4 - -import numpy as np -from dateutil.tz import tzlocal - -from pynwb import NWBFile - -nwbfile = NWBFile( - session_description="my first synthetic recording", - identifier=str(uuid4()), - session_start_time=datetime.now(tzlocal()), - experimenter=[ - "Baggins, Bilbo", - ], - lab="Bag End Laboratory", - institution="University of Middle Earth at the Shire", - experiment_description="I went on an adventure to reclaim vast treasures.", - session_id="LONELYMTN001", -) - -####################### -# Device metadata -# ^^^^^^^^^^^^^^^ -# -# Device metadata is represented by :py:class:`~pynwb.device.Device` objects. -# To create a device, you can use the :py:class:`~pynwb.device.Device` instance method -# :py:meth:`~pynwb.file.NWBFile.create_device`. - -device = nwbfile.create_device(name="Heka ITC-1600") - -####################### -# Electrode metadata -# ^^^^^^^^^^^^^^^^^^ -# -# Intracellular electrode metadata is represented by :py:class:`~pynwb.icephys.IntracellularElectrode` objects. -# To create an electrode group, you can use the :py:class:`~pynwb.file.NWBFile` instance method -# :py:meth:`~pynwb.file.NWBFile.create_icephys_electrode`. - -elec = nwbfile.create_icephys_electrode( - name="elec0", description="a mock intracellular electrode", device=device -) - -####################### -# Stimulus data -# ^^^^^^^^^^^^^ -# -# Intracellular stimulus and response data are represented with subclasses of -# :py:class:`~pynwb.icephys.PatchClampSeries`. 
There are two classes for representing stimulus -# data -# -# - :py:class:`~pynwb.icephys.VoltageClampStimulusSeries` -# - :py:class:`~pynwb.icephys.CurrentClampStimulusSeries` -# -# and three classes for representing response -# -# - :py:class:`~pynwb.icephys.VoltageClampSeries` -# - :py:class:`~pynwb.icephys.CurrentClampSeries` -# - :py:class:`~pynwb.icephys.IZeroClampSeries` -# -# Here, we will use :py:class:`~pynwb.icephys.CurrentClampStimulusSeries` to store current clamp stimulus -# data and then add it to our NWBFile as stimulus data using the :py:class:`~pynwb.file.NWBFile` method -# :py:meth:`~pynwb.file.NWBFile.add_stimulus`. - -from pynwb.icephys import CurrentClampStimulusSeries - -ccss = CurrentClampStimulusSeries( - name="ccss", - data=[1, 2, 3, 4, 5], - starting_time=123.6, - rate=10e3, - electrode=elec, - gain=0.02, - sweep_number=0, -) - -nwbfile.add_stimulus(ccss, use_sweep_table=True) - -####################### -# We now add another stimulus series but from a different sweep. TimeSeries -# having the same starting time belong to the same sweep. - -from pynwb.icephys import VoltageClampStimulusSeries - -vcss = VoltageClampStimulusSeries( - name="vcss", - data=[2, 3, 4, 5, 6], - starting_time=234.5, - rate=10e3, - electrode=elec, - gain=0.03, - sweep_number=1, -) - -nwbfile.add_stimulus(vcss, use_sweep_table=True) - -####################### -# Here, we will use :py:class:`~pynwb.icephys.CurrentClampSeries` to store current clamp -# data and then add it to our NWBFile as acquired data using the :py:class:`~pynwb.file.NWBFile` method -# :py:meth:`~pynwb.file.NWBFile.add_acquisition`. 
- -from pynwb.icephys import CurrentClampSeries - -ccs = CurrentClampSeries( - name="ccs", - data=[0.1, 0.2, 0.3, 0.4, 0.5], - conversion=1e-12, - resolution=np.nan, - starting_time=123.6, - rate=20e3, - electrode=elec, - gain=0.02, - bias_current=1e-12, - bridge_balance=70e6, - capacitance_compensation=1e-12, - sweep_number=0, -) - -nwbfile.add_acquisition(ccs, use_sweep_table=True) - -####################### -# And voltage clamp data from the second sweep using -# :py:class:`~pynwb.icephys.VoltageClampSeries`. - -from pynwb.icephys import VoltageClampSeries - -vcs = VoltageClampSeries( - name="vcs", - data=[0.1, 0.2, 0.3, 0.4, 0.5], - conversion=1e-12, - resolution=np.nan, - starting_time=234.5, - rate=20e3, - electrode=elec, - gain=0.02, - capacitance_slow=100e-12, - resistance_comp_correction=70.0, - sweep_number=1, -) - -nwbfile.add_acquisition(vcs, use_sweep_table=True) - -#################### -# .. _icephys_writing: -# -# Once you have finished adding all of your data to the :py:class:`~pynwb.file.NWBFile`, -# write the file with :py:class:`~pynwb.NWBHDF5IO`. - -from pynwb import NWBHDF5IO - -with NWBHDF5IO("icephys_example.nwb", "w") as io: - io.write(nwbfile) - -#################### -# For more details on :py:class:`~pynwb.NWBHDF5IO`, see the :ref:`basic tutorial `. - -#################### -# .. _icephys_reading: -# -# Reading electrophysiology data -# ------------------------------ -# -# Now that you have written some intracellular electrophysiology data, you can read it back in. - -io = NWBHDF5IO("icephys_example.nwb", "r") -nwbfile = io.read() - -#################### -# For details on retrieving data from an :py:class:`~pynwb.file.NWBFile`, we refer the reader to the -# :ref:`basic tutorial `. For this tutorial, we will just get back our the -# :py:class:`~pynwb.icephys.CurrentClampStimulusSeries` object we added above. -# -# First, get the :py:class:`~pynwb.icephys.CurrentClampStimulusSeries` we added as stimulus data. 
- -ccss = nwbfile.get_stimulus("ccss") - -#################### -# Grabbing acquisition data can be done via :py:meth:`~pynwb.file.NWBFile.get_acquisition` - -vcs = nwbfile.get_acquisition("vcs") - -#################### -# We can also get back the electrode we added. - -elec = nwbfile.get_icephys_electrode("elec0") - -#################### -# Alternatively, we can also get this electrode from the :py:class:`~pynwb.icephys.CurrentClampStimulusSeries` -# we retrieved above. This is exposed via the :py:meth:`~pynwb.icephys.PatchClampSeries.electrode` attribute. - -elec = ccss.electrode - -#################### -# And the device name via :py:meth:`~pynwb.file.NWBFile.get_device` - -device = nwbfile.get_device("Heka ITC-1600") - -#################### -# If you have data from multiple electrodes and multiple sweeps, it can be -# tedious and expensive to search all :py:class:`~pynwb.icephys.PatchClampSeries` for the -# :py:class:`~pynwb.base.TimeSeries` with a given sweep. -# -# Fortunately you don't have to do that manually, instead you can just query -# the :py:class:`~pynwb.icephys.SweepTable` which stores the mapping between the -# PatchClampSeries which belongs to a certain sweep number via -# :py:meth:`~pynwb.icephys.SweepTable.get_series`. -# -# The following call will return the voltage clamp data of two timeseries -# consisting of acquisition and stimulus, from sweep 1. - -series = nwbfile.sweep_table.get_series(1) - -# close the file -io.close() diff --git a/docs/gallery/general/scratch.py b/docs/gallery/general/scratch.py index 0e00c5e96..715f14876 100644 --- a/docs/gallery/general/scratch.py +++ b/docs/gallery/general/scratch.py @@ -145,7 +145,7 @@ # Now lets do an analysis for which we do not have a specification, but we would like to store # the results for. 
-filt_ts = nwb_scratch.modules["filtering_module"]["filtered_timeseries"] +filt_ts = nwb_scratch.processing["filtering_module"]["filtered_timeseries"] fft = np.fft.fft(filt_ts.data) diff --git a/docs/source/conf.py b/docs/source/conf.py index 7c1f33fa3..c4966c491 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -148,7 +148,7 @@ def __call__(self, filename): 'nwbwidgets': ("https://nwb-widgets.readthedocs.io/en/latest/", None), 'nwb-overview': ("https://nwb-overview.readthedocs.io/en/latest/", None), 'zarr': ("https://zarr.readthedocs.io/en/stable/", None), - 'hdmf-zarr': ("https://hdmf-zarr.readthedocs.io/en/latest/", None), + 'hdmf-zarr': ("https://hdmf-zarr.readthedocs.io/en/stable/", None), 'numcodecs': ("https://numcodecs.readthedocs.io/en/latest/", None), } @@ -161,7 +161,7 @@ def __call__(self, filename): 'hdmf-docs': ('https://hdmf.readthedocs.io/en/stable/%s', '%s'), 'dandi': ('https://www.dandiarchive.org/%s', '%s'), "nwbinspector": ("https://nwbinspector.readthedocs.io/en/dev/%s", "%s"), - 'hdmf-zarr': ('https://hdmf-zarr.readthedocs.io/en/latest/%s', '%s'), + 'hdmf-zarr': ('https://hdmf-zarr.readthedocs.io/en/stable/%s', '%s'), } nitpicky = True diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index 32f9ed4d9..5f8fec157 100644 --- a/src/pynwb/__init__.py +++ b/src/pynwb/__init__.py @@ -79,12 +79,6 @@ def __get_resources() -> dict: return ret -def _get_resources(): - # LEGACY: Needed to support legacy implementation. - # TODO: Remove this in PyNWB 3.0. 
- warn("The function '_get_resources' is deprecated and will be removed in a future release.", DeprecationWarning) - return __get_resources() - # a global type map global __TYPE_MAP diff --git a/src/pynwb/base.py b/src/pynwb/base.py index 0bc5d22d7..bc3aa11b8 100644 --- a/src/pynwb/base.py +++ b/src/pynwb/base.py @@ -51,7 +51,7 @@ def add_container(self, **kwargs): ''' Add an NWBContainer to this ProcessingModule ''' - warn(PendingDeprecationWarning('add_container will be replaced by add')) + warn("add_container is deprecated and will be removed in PyNWB 4.0. Use add instead.", DeprecationWarning) self.add(kwargs['container']) @docval({'name': 'container_name', 'type': str, 'doc': 'the name of the NWBContainer to retrieve'}) @@ -59,18 +59,17 @@ def get_container(self, **kwargs): ''' Retrieve an NWBContainer from this ProcessingModule ''' - warn(PendingDeprecationWarning('get_container will be replaced by get')) + warn('get_container is deprecated and will be removed in PyNWB 4.0. Use get instead.', DeprecationWarning) return self.get(kwargs['container_name']) @docval({'name': 'NWBDataInterface', 'type': (NWBDataInterface, DynamicTable), 'doc': 'the NWBDataInterface to add to this Module'}) def add_data_interface(self, **kwargs): - warn(PendingDeprecationWarning('add_data_interface will be replaced by add')) - self.add(kwargs['NWBDataInterface']) + warn('add_data_interface is deprecated and will be removed in PyNWB 4.0. Use add instead.', DeprecationWarning) @docval({'name': 'data_interface_name', 'type': str, 'doc': 'the name of the NWBContainer to retrieve'}) def get_data_interface(self, **kwargs): - warn(PendingDeprecationWarning('get_data_interface will be replaced by get')) + warn('get_data_interface is deprecated and will be removed in PyNWB 4.0. 
Use get instead.', DeprecationWarning) return self.get(kwargs['data_interface_name']) diff --git a/src/pynwb/core.py b/src/pynwb/core.py index 59f849db8..332bb5b50 100644 --- a/src/pynwb/core.py +++ b/src/pynwb/core.py @@ -38,15 +38,21 @@ def _error_on_new_warn_on_construct(self, error_msg: str): Raise an error when a check is violated on instance creation. To ensure backwards compatibility, this method throws a warning instead of raising an error when reading from a file, ensuring that - files with invalid data can be read. If error_msg is set to None - the function will simply return without further action. + files with invalid data can be read. """ - if error_msg is None: - return if not self._in_construct_mode: raise ValueError(error_msg) warn(error_msg) + def _error_on_new_pass_on_construct(self, error_msg: str): + """ + Raise an error when a check is violated on instance creation. + When reading from a file, do nothing, ensuring that files with + invalid data or deprecated neurodata types can be read. + """ + if not self._in_construct_mode: + raise ValueError(error_msg) + def _get_type_map(self): return get_type_map() @@ -130,27 +136,29 @@ def __init__(self, **kwargs): notes, description = popargs('notes', 'description', kwargs) super().__init__(**kwargs) if notes != '': - warn('The `notes` argument of ScratchData.__init__ will be deprecated. Use description instead.', - PendingDeprecationWarning) - if notes != '' and description != '': + self._error_on_new_pass_on_construct( + error_msg=("The `notes` argument of ScratchData.__init__ has been deprecated and " + "will be removed in PyNWB 4.0. Use description instead.") + ) + if description is not None: raise ValueError('Cannot provide both notes and description to ScratchData.__init__. 
The description ' 'argument is recommended.') description = notes if not description: - warn('ScratchData.description will be required in a future major release of PyNWB.', - PendingDeprecationWarning) + self._error_on_new_pass_on_construct(error_msg='ScratchData.description is required.') self.description = description @property def notes(self): - warn('Use of ScratchData.notes will be deprecated. Use ScratchData.description instead.', - PendingDeprecationWarning) + warn(("Use of ScratchData.notes has been deprecated and will be removed in PyNWB 4.0. " + "Use ScratchData.description instead."), DeprecationWarning) return self.description - + @notes.setter def notes(self, value): - warn('Use of ScratchData.notes will be deprecated. Use ScratchData.description instead.', - PendingDeprecationWarning) + self._error_on_new_pass_on_construct( + error_msg=("Use of ScratchData.notes has been deprecated and will be removed in PyNWB 4.0. " + "Use ScratchData.description instead.")) self.description = value diff --git a/src/pynwb/ecephys.py b/src/pynwb/ecephys.py index 41535bbbc..1c5400fe3 100644 --- a/src/pynwb/ecephys.py +++ b/src/pynwb/ecephys.py @@ -231,7 +231,9 @@ class Clustering(NWBDataInterface): 'shape': (None,)}, {'name': 'name', 'type': str, 'doc': 'the name of this container', 'default': 'Clustering'}) def __init__(self, **kwargs): - warnings.warn("use pynwb.misc.Units or NWBFile.units instead", DeprecationWarning) + self._error_on_new_pass_on_construct( + error_msg='The Clustering neurodata type is deprecated. 
Use pynwb.misc.Units or NWBFile.units instead' + ) args_to_set = popargs_to_dict(('description', 'num', 'peak_over_rms', 'times'), kwargs) super().__init__(**kwargs) args_to_set['peak_over_rms'] = list(args_to_set['peak_over_rms']) @@ -265,7 +267,9 @@ class ClusterWaveforms(NWBDataInterface): 'doc': 'the standard deviations of waveforms for each cluster'}, {'name': 'name', 'type': str, 'doc': 'the name of this container', 'default': 'ClusterWaveforms'}) def __init__(self, **kwargs): - warnings.warn("use pynwb.misc.Units or NWBFile.units instead", DeprecationWarning) + self._error_on_new_pass_on_construct( + error_msg='The ClusterWaveforms neurodata type is deprecated. Use pynwb.misc.Units or NWBFile.units instead' + ) args_to_set = popargs_to_dict(('clustering_interface', 'waveform_filtering', 'waveform_mean', 'waveform_sd'), kwargs) super().__init__(**kwargs) diff --git a/src/pynwb/file.py b/src/pynwb/file.py index 4b4010d4d..27f4acada 100644 --- a/src/pynwb/file.py +++ b/src/pynwb/file.py @@ -384,9 +384,11 @@ class NWBFile(MultiContainerInterface, HERDManager): 'doc': 'the ElectrodeGroups that belong to this NWBFile', 'default': None}, {'name': 'ic_electrodes', 'type': (list, tuple), 'doc': 'DEPRECATED use icephys_electrodes parameter instead. ' - 'IntracellularElectrodes that belong to this NWBFile', 'default': None}, + 'IntracellularElectrodes that belong to this NWBFile', 'default': None}, + # TODO remove this arg in PyNWB 4.0 {'name': 'sweep_table', 'type': SweepTable, - 'doc': 'the SweepTable that belong to this NWBFile', 'default': None}, + 'doc': '[DEPRECATED] Use IntracellularRecordingsTable instead. 
' + 'The SweepTable that belong to this NWBFile', 'default': None}, {'name': 'imaging_planes', 'type': (list, tuple), 'doc': 'ImagingPlanes that belong to this NWBFile', 'default': None}, {'name': 'ogen_sites', 'type': (list, tuple), @@ -490,14 +492,20 @@ def __init__(self, **kwargs): args_to_set['file_create_date'] = list(map(_add_missing_timezone, file_create_date)) # backwards-compatibility code for ic_electrodes / icephys_electrodes - icephys_electrodes = args_to_set['icephys_electrodes'] ic_electrodes = args_to_set['ic_electrodes'] - if icephys_electrodes is None and ic_electrodes is not None: - warn("Use of the ic_electrodes parameter is deprecated. " - "Use the icephys_electrodes parameter instead", DeprecationWarning) + if ic_electrodes is not None: + self._error_on_new_pass_on_construct(error_msg=("Use of the ic_electrodes parameter is deprecated " + "and will be removed in PyNWB 4.0. " + "Use the icephys_electrodes parameter instead")) args_to_set['icephys_electrodes'] = ic_electrodes args_to_set.pop('ic_electrodes') # do not set this arg + # backwards-compatibility for sweep table + if args_to_set['sweep_table'] is not None: + self._error_on_new_pass_on_construct(error_msg=("SweepTable is deprecated. Use the " + "IntracellularRecordingsTable instead. 
See also the " + "NWBFile.add_intracellular_recordings function.")) + # convert single experimenter to tuple experimenter = args_to_set['experimenter'] if isinstance(experimenter, str): @@ -552,30 +560,10 @@ def objects(self): self.all_children() return self.__obj - @property - def modules(self): - warn("NWBFile.modules has been replaced by NWBFile.processing.", DeprecationWarning) - return self.processing - @property def epoch_tags(self): return set(self.epochs.tags[:]) if self.epochs is not None else set() - @property - def ec_electrode_groups(self): - warn("NWBFile.ec_electrode_groups has been replaced by NWBFile.electrode_groups.", DeprecationWarning) - return self.electrode_groups - - @property - def ec_electrodes(self): - warn("NWBFile.ec_electrodes has been replaced by NWBFile.electrodes.", DeprecationWarning) - return self.electrodes - - @property - def ic_electrodes(self): - warn("NWBFile.ic_electrodes has been replaced by NWBFile.icephys_electrodes.", DeprecationWarning) - return self.icephys_electrodes - @property def icephys_filtering(self): return self.fields.get('icephys_filtering') @@ -583,34 +571,11 @@ def icephys_filtering(self): @icephys_filtering.setter def icephys_filtering(self, val): if val is not None: - warn("Use of icephys_filtering is deprecated. Use the IntracellularElectrode.filtering field instead", - DeprecationWarning) + self._error_on_new_warn_on_construct("Use of icephys_filtering is deprecated " + "and will be removed in PyNWB 4.0. " + "Use the IntracellularElectrode.filtering field instead") self.fields['icephys_filtering'] = val - def add_ic_electrode(self, *args, **kwargs): - """ - This method is deprecated and will be removed in future versions. 
Please - use :py:meth:`~pynwb.file.NWBFile.add_icephys_electrode` instead - """ - warn("NWBFile.add_ic_electrode has been replaced by NWBFile.add_icephys_electrode.", DeprecationWarning) - return self.add_icephys_electrode(*args, **kwargs) - - def create_ic_electrode(self, *args, **kwargs): - """ - This method is deprecated and will be removed in future versions. Please - use :py:meth:`~pynwb.file.NWBFile.create_icephys_electrode` instead - """ - warn("NWBFile.create_ic_electrode has been replaced by NWBFile.create_icephys_electrode.", DeprecationWarning) - return self.create_icephys_electrode(*args, **kwargs) - - def get_ic_electrode(self, *args, **kwargs): - """ - This method is deprecated and will be removed in future versions. Please - use :py:meth:`~pynwb.file.NWBFile.get_icephys_electrode` instead - """ - warn("NWBFile.get_ic_electrode has been replaced by NWBFile.get_icephys_electrode.", DeprecationWarning) - return self.get_icephys_electrode(*args, **kwargs) - def __check_epochs(self): if self.epochs is None: self.epochs = TimeIntervals(name='epochs', description='experimental epochs') @@ -624,13 +589,6 @@ def add_epoch_column(self, **kwargs): self.__check_epochs() self.epochs.add_column(**kwargs) - def add_epoch_metadata_column(self, *args, **kwargs): - """ - This method is deprecated and will be removed in future versions. Please - use :py:meth:`~pynwb.file.NWBFile.add_epoch_column` instead - """ - raise DeprecationWarning("Please use NWBFile.add_epoch_column") - @docval(*get_docval(TimeIntervals.add_interval), allow_extra=True) def add_epoch(self, **kwargs): @@ -839,7 +797,15 @@ def _check_sweep_table(self): Create a SweepTable if not yet done. 
""" if self.sweep_table is None: - self.sweep_table = SweepTable(name='sweep_table') + if self._in_construct_mode: + # Construct the SweepTable without triggering errors in construct mode because + # SweepTable has been deprecated + sweep_table = SweepTable.__new__(SweepTable, parent=self, in_construct_mode=True) + sweep_table.__init__(name='sweep_table') + sweep_table._in_construct_mode = False + else: + sweep_table = SweepTable(name='sweep_table') + self.sweep_table = sweep_table def _update_sweep_table(self, nwbdata): """ @@ -860,25 +826,11 @@ def add_acquisition(self, **kwargs): if use_sweep_table: self._update_sweep_table(nwbdata) - @docval({'name': 'stimulus', 'type': (TimeSeries, DynamicTable, NWBDataInterface), 'default': None, + @docval({'name': 'stimulus', 'type': (TimeSeries, DynamicTable, NWBDataInterface), 'doc': 'The stimulus presentation data to add to this NWBFile.'}, - {'name': 'use_sweep_table', 'type': bool, 'default': False, 'doc': 'Use the deprecated SweepTable'}, - {'name': 'timeseries', 'type': TimeSeries, 'default': None, - 'doc': 'The "timeseries" keyword argument is deprecated. Use the "nwbdata" argument instead.'},) + {'name': 'use_sweep_table', 'type': bool, 'default': False, 'doc': 'Use the deprecated SweepTable'},) def add_stimulus(self, **kwargs): - stimulus, timeseries = popargs('stimulus', 'timeseries', kwargs) - if stimulus is None and timeseries is None: - raise ValueError( - "The 'stimulus' keyword argument is required. The 'timeseries' keyword argument can be " - "provided for backwards compatibility but is deprecated in favor of 'stimulus' and will be " - "removed in PyNWB 3.0." - ) - # TODO remove this support in PyNWB 3.0 - if timeseries is not None: - warn("The 'timeseries' keyword argument is deprecated and will be removed in PyNWB 3.0. 
" - "Use the 'stimulus' argument instead.", DeprecationWarning) - if stimulus is None: - stimulus = timeseries + stimulus = popargs('stimulus', kwargs) self._add_stimulus_internal(stimulus) use_sweep_table = popargs('use_sweep_table', kwargs) if use_sweep_table: @@ -1067,12 +1019,14 @@ def get_icephys_meta_parent_table(self): 'doc': 'The name of the data. Required only when passing in a scalar, numpy.ndarray, list, or tuple', 'default': None}, {'name': 'notes', 'type': str, - 'doc': ('Notes to add to the data. Only used when passing in numpy.ndarray, list, or tuple. This ' - 'argument is not recommended. Use the `description` argument instead.'), + 'doc': ('[DEPRECATED] Notes to add to the data. ' + 'Only used when passing in numpy.ndarray, list, or tuple. This argument is not recommended. ' + 'Use the `description` argument instead.'), # TODO remove this arg in PyNWB 4.0 'default': None}, {'name': 'table_description', 'type': str, - 'doc': ('Description for the internal DynamicTable used to store a pandas.DataFrame. This ' + 'doc': ('[DEPRECATED] Description for the internal DynamicTable used to store a pandas.DataFrame. This ' 'argument is not recommended. Use the `description` argument instead.'), + # TODO remove this arg in PyNWB 4.0 'default': ''}, {'name': 'description', 'type': str, 'doc': ('Description of the data. Required only when passing in a scalar, numpy.ndarray, ' @@ -1084,8 +1038,8 @@ def add_scratch(self, **kwargs): data, name, notes, table_description, description = getargs('data', 'name', 'notes', 'table_description', 'description', kwargs) if notes is not None or table_description != '': - warn('Use of the `notes` or `table_description` argument will be removed in a future version of PyNWB. ' - 'Use the `description` argument instead.', PendingDeprecationWarning) + warn(('Use of the `notes` or `table_description` argument is deprecated and will be removed in PyNWB 4.0. 
' + 'Use the `description` argument instead.'), DeprecationWarning) if description is not None: raise ValueError('Cannot call add_scratch with (notes or table_description) and description') diff --git a/src/pynwb/icephys.py b/src/pynwb/icephys.py index ef536122c..b8c082535 100644 --- a/src/pynwb/icephys.py +++ b/src/pynwb/icephys.py @@ -345,9 +345,11 @@ class SweepTable(DynamicTable): 'default': "A sweep table groups different PatchClampSeries together."}, *get_docval(DynamicTable.__init__, 'id', 'columns', 'colnames')) def __init__(self, **kwargs): - warnings.warn("Use of SweepTable is deprecated. Use the IntracellularRecordingsTable " - "instead. See also the NWBFile.add_intracellular_recordings function.", - DeprecationWarning) + error_msg=('SweepTable is deprecated. Use the IntracellularRecordingsTable instead. ' + 'See also the NWBFile.add_intracellular_recordings function.') + if not self._in_construct_mode: + raise ValueError(error_msg) + super().__init__(**kwargs) @docval({'name': 'pcs', 'type': PatchClampSeries, diff --git a/src/pynwb/image.py b/src/pynwb/image.py index d5d3e9114..d2ee1c825 100644 --- a/src/pynwb/image.py +++ b/src/pynwb/image.py @@ -96,13 +96,10 @@ def __init__(self, **kwargs): setattr(self, key, val) if self._change_external_file_format(): - warnings.warn( - "%s '%s': The value for 'format' has been changed to 'external'. " - "Setting a default value for 'format' is deprecated and will be changed " - "to raising a ValueError in the next major release." - % (self.__class__.__name__, self.name), - DeprecationWarning, - ) + self._error_on_new_warn_on_construct(error_msg=f"{self.__class__.__name__} '{self.name}': " + "The value for 'format' has been changed to 'external'. 
" + "If an external file is detected, setting a value for " + "'format' other than 'external' is deprecated.") if not self._check_image_series_dimension(): warnings.warn( @@ -111,13 +108,17 @@ def __init__(self, **kwargs): % (self.__class__.__name__, self.name) ) - self._error_on_new_warn_on_construct( - error_msg=self._check_external_file_starting_frame_length() - ) - self._error_on_new_warn_on_construct( - error_msg=self._check_external_file_format() - ) - self._error_on_new_warn_on_construct(error_msg=self._check_external_file_data()) + error_msg = self._check_external_file_starting_frame_length() + if error_msg: + self._error_on_new_warn_on_construct(error_msg=error_msg) + + error_msg = self._check_external_file_format() + if error_msg: + self._error_on_new_warn_on_construct(error_msg=error_msg) + + error_msg = self._check_external_file_data() + if error_msg: + self._error_on_new_warn_on_construct(error_msg=error_msg) def _change_external_file_format(self): """ @@ -200,7 +201,7 @@ def bits_per_pixel(self): @bits_per_pixel.setter def bits_per_pixel(self, val): if val is not None: - warnings.warn("bits_per_pixel is no longer used", DeprecationWarning) + self._error_on_new_pass_on_construct(error_msg="bits_per_pixel is deprecated") self.fields['bits_per_pixel'] = val @@ -258,16 +259,14 @@ class IndexSeries(TimeSeries): def __init__(self, **kwargs): indexed_timeseries, indexed_images = popargs('indexed_timeseries', 'indexed_images', kwargs) if kwargs['unit'] and kwargs['unit'] != 'N/A': - msg = ("The 'unit' field of IndexSeries is fixed to the value 'N/A' starting in NWB 2.5. Passing " - "a different value for 'unit' will raise an error in PyNWB 3.0.") - warnings.warn(msg, PendingDeprecationWarning) + self._error_on_new_pass_on_construct(error_msg=("The 'unit' field of IndexSeries is " + "fixed to the value 'N/A'.")) if not indexed_timeseries and not indexed_images: msg = "Either indexed_timeseries or indexed_images must be provided when creating an IndexSeries." 
raise ValueError(msg) if indexed_timeseries: - msg = ("The indexed_timeseries field of IndexSeries is discouraged and will be deprecated in " - "a future version of NWB. Use the indexed_images field instead.") - warnings.warn(msg, PendingDeprecationWarning) + self._error_on_new_pass_on_construct("The indexed_timeseries field of IndexSeries is deprecated. " + "Use the indexed_images field instead.") kwargs['unit'] = 'N/A' # fixed value starting in NWB 2.5 super().__init__(**kwargs) self.indexed_timeseries = indexed_timeseries diff --git a/src/pynwb/ophys.py b/src/pynwb/ophys.py index aecba765d..54c355e70 100644 --- a/src/pynwb/ophys.py +++ b/src/pynwb/ophys.py @@ -115,14 +115,14 @@ def __init__(self, **kwargs): if not isinstance(args_to_set['optical_channel'], list): args_to_set['optical_channel'] = [args_to_set['optical_channel']] if args_to_set['manifold'] is not None: - warnings.warn("The 'manifold' argument is deprecated in favor of 'origin_coords' and 'grid_spacing'.", - DeprecationWarning) + error_msg = "The 'manifold' argument is deprecated in favor of 'origin_coords' and 'grid_spacing'." + self._error_on_new_pass_on_construct(error_msg=error_msg) if args_to_set['conversion'] != 1.0: - warnings.warn("The 'conversion' argument is deprecated in favor of 'origin_coords' and 'grid_spacing'.", - DeprecationWarning) + error_msg = "The 'conversion' argument is deprecated in favor of 'origin_coords' and 'grid_spacing'." + self._error_on_new_pass_on_construct(error_msg=error_msg) if args_to_set['unit'] != 'meters': - warnings.warn("The 'unit' argument is deprecated in favor of 'origin_coords_unit' and 'grid_spacing_unit'.", - DeprecationWarning) + error_msg = "The 'unit' argument is deprecated in favor of 'origin_coords_unit' and 'grid_spacing_unit'." 
+ self._error_on_new_pass_on_construct(error_msg=error_msg) for key, val in args_to_set.items(): setattr(self, key, val) diff --git a/test.py b/test.py index f64fcd75d..de3c2c762 100644 --- a/test.py +++ b/test.py @@ -90,7 +90,6 @@ def _import_from_file(script): os.path.join('domain', 'brain_observatory.py'), # TODO create separate workflow for this ] - def run_example_tests(): """Run the Sphinx gallery example files, excluding ROS3-dependent ones, to check for errors.""" logging.info('running example tests') @@ -99,7 +98,7 @@ def run_example_tests(): for f in files: if f.endswith(".py"): name_with_parent_dir = os.path.join(os.path.basename(root), f) - if name_with_parent_dir in ros3_examples or name_with_parent_dir in allensdk_examples: + if name_with_parent_dir in [*ros3_examples, *allensdk_examples]: logging.info("Skipping %s" % name_with_parent_dir) continue examples_scripts.append(os.path.join(root, f)) diff --git a/tests/back_compat/test_import_structure.py b/tests/back_compat/test_import_structure.py index 81c4acf90..8fde69520 100644 --- a/tests/back_compat/test_import_structure.py +++ b/tests/back_compat/test_import_structure.py @@ -41,7 +41,6 @@ def test_outer_import_structure(self): "__spec__", "__version__", "_due", - "_get_resources", "_version", "available_namespaces", "base", diff --git a/tests/integration/hdf5/test_ecephys.py b/tests/integration/hdf5/test_ecephys.py index a2c0db4b5..9bfdf3086 100644 --- a/tests/integration/hdf5/test_ecephys.py +++ b/tests/integration/hdf5/test_ecephys.py @@ -172,19 +172,31 @@ class TestClusteringIO(AcquisitionH5IOMixin, TestCase): def setUpContainer(self): """ Return a test Clustering to read/write """ - with self.assertWarnsWith(DeprecationWarning, 'use pynwb.misc.Units or NWBFile.units instead'): - return Clustering("A fake Clustering interface", - [0, 1, 2, 0, 1, 2], [100., 101., 102.], [float(i) for i in range(10, 61, 10)]) + # raise error on write + error_msg = "The Clustering neurodata type is deprecated. 
Use pynwb.misc.Units or NWBFile.units instead" + kwargs = dict(description="A fake Clustering interface", + num=[0, 1, 2, 0, 1, 2], + peak_over_rms=[100., 101., 102.], + times=[float(i) for i in range(10, 61, 10)]) + + # create object with deprecated argument + with self.assertRaisesWith(ValueError, error_msg): + Clustering(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # no warning should be raised + obj = Clustering.__new__(Clustering, in_construct_mode=True) + obj.__init__(**kwargs) + + return obj def roundtripContainer(self, cache_spec=False): - # catch the DeprecationWarning raised when reading the Clustering object from file - with self.assertWarnsWith(DeprecationWarning, 'use pynwb.misc.Units or NWBFile.units instead'): - return super().roundtripContainer(cache_spec) + # no warning or error should be raised when reading the Clustering object from file + return super().roundtripContainer(cache_spec) def roundtripExportContainer(self, cache_spec=False): - # catch the DeprecationWarning raised when reading the Clustering object from file - with self.assertWarnsWith(DeprecationWarning, 'use pynwb.misc.Units or NWBFile.units instead'): - return super().roundtripExportContainer(cache_spec) + # no warning or error should be raised when reading the Clustering object from file + return super().roundtripExportContainer(cache_spec) class SpikeEventSeriesConstructor(NWBH5IOFlexMixin, TestCase): @@ -223,12 +235,26 @@ def setUpContainer(self): times = [1.3, 2.3] num = [3, 4] peak_over_rms = [5.3, 6.3] - with self.assertWarnsWith(DeprecationWarning, 'use pynwb.misc.Units or NWBFile.units instead'): - self.clustering = Clustering('description', num, peak_over_rms, times) + + # raise error on write + clust = Clustering.__new__(Clustering, in_construct_mode=True) + clust.__init__('description', num, peak_over_rms, times) + self.clustering = clust + means = [[7.3, 7.3]] stdevs = [[8.3, 8.3]] - with 
self.assertWarnsWith(DeprecationWarning, 'use pynwb.misc.Units or NWBFile.units instead'): + msg = "The ClusterWaveforms neurodata type is deprecated. Use pynwb.misc.Units or NWBFile.units instead" + with self.assertRaisesWith(ValueError, msg): cw = ClusterWaveforms(self.clustering, 'filtering', means, stdevs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # no warning should be raised + cw = ClusterWaveforms.__new__(ClusterWaveforms, + container_source=None, + parent=None, + in_construct_mode=True) + cw.__init__(self.clustering, 'filtering', means, stdevs) + return cw def addContainer(self, nwbfile): @@ -237,14 +263,12 @@ def addContainer(self, nwbfile): nwbfile.add_acquisition(self.container) def roundtripContainer(self, cache_spec=False): - # catch the DeprecationWarning raised when reading the Clustering object from file - with self.assertWarnsWith(DeprecationWarning, 'use pynwb.misc.Units or NWBFile.units instead'): - return super().roundtripContainer(cache_spec) + # no warning or error should be raised when reading the Clustering object from file + return super().roundtripContainer(cache_spec) def roundtripExportContainer(self, cache_spec=False): - # catch the DeprecationWarning raised when reading the Clustering object from file - with self.assertWarnsWith(DeprecationWarning, 'use pynwb.misc.Units or NWBFile.units instead'): - return super().roundtripExportContainer(cache_spec) + # no warning or error should be raised when reading the Clustering object from file + return super().roundtripExportContainer(cache_spec) class FeatureExtractionConstructor(NWBH5IOFlexMixin, TestCase): diff --git a/tests/integration/hdf5/test_icephys.py b/tests/integration/hdf5/test_icephys.py index b50e5d9f5..c40a51b53 100644 --- a/tests/integration/hdf5/test_icephys.py +++ b/tests/integration/hdf5/test_icephys.py @@ -6,7 +6,6 @@ VoltageClampSeries, IZeroClampSeries) from pynwb.device import Device from pynwb.testing import NWBH5IOMixin, 
AcquisitionH5IOMixin, TestCase -import warnings class TestIntracellularElectrode(NWBH5IOMixin, TestCase): @@ -153,20 +152,24 @@ def setUpContainer(self): self.pcs = PatchClampSeries(name="pcs", data=[1, 2, 3, 4, 5], unit='A', starting_time=123.6, rate=10e3, electrode=self.elec, gain=0.126, stimulus_description="gotcha ya!", sweep_number=np.uint(4711)) - # Create the SweepTable but ignore the DeprecationWarning - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('ignore', DeprecationWarning) - sweeptable = SweepTable(name='sweep_table') - # Reissue any other warnings that may have occurred - for i in w: - warnings.warn(i.message, i.category) + + # create the sweeptable in construct mode, modeling the behavior of the ObjectMapper on read + # no user warning or error should be raised + sweeptable = SweepTable.__new__(SweepTable) + sweeptable._in_construct_mode = True + sweeptable.__init__(name='sweep_table') + sweeptable._in_construct_mode = False + return sweeptable def addContainer(self, nwbfile): """ Add the test SweepTable, PatchClampSeries, IntracellularElectrode, and Device to the given NWBFile """ - nwbfile.sweep_table = self.container + # NOTE - if the SweepTable creation warning has already been bypassed so that self.container is an + # instance of SweepTable, then adding sweep_table to the NWBFile in this way will not trigger an + # error + nwbfile.sweep_table = self.container nwbfile.add_device(self.device) nwbfile.add_icephys_electrode(self.elec) nwbfile.add_acquisition(self.pcs, use_sweep_table=True) @@ -177,17 +180,13 @@ def getContainer(self, nwbfile): def roundtripContainer(self, cache_spec=False): # catch the DeprecationWarning raised when reading the SweepTable object from file - with self.assertWarnsWith(DeprecationWarning, - "Use of SweepTable is deprecated. Use the IntracellularRecordingsTable instead. 
" - "See also the NWBFile.add_intracellular_recordings function."): - return super().roundtripContainer(cache_spec) + # no warning or error message should be raised + return super().roundtripContainer(cache_spec) def roundtripExportContainer(self, cache_spec=False): # catch the DeprecationWarning raised when reading the SweepTable object from file - with self.assertWarnsWith(DeprecationWarning, - "Use of SweepTable is deprecated. Use the IntracellularRecordingsTable instead. " - "See also the NWBFile.add_intracellular_recordings function."): - return super().roundtripExportContainer(cache_spec) + # no warning or error message should be raised + return super().roundtripExportContainer(cache_spec) def test_container(self): """ Test properties of the SweepTable read from file """ @@ -224,19 +223,22 @@ def setUpContainer(self): starting_time=123.6, rate=10e3, electrode=self.elec, gain=0.126, stimulus_description="gotcha ya!", sweep_number=np.uint(4712)) - # Create the SweepTable but ignore the DeprecationWarning - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter('ignore', DeprecationWarning) - sweeptable = SweepTable(name='sweep_table') - # Reissue any other warnings that may have occurred - for i in w: - warnings.warn(i.message, i.category) + # create the sweeptable in construct mode, modeling the behavior of the ObjectMapper on read + # no warning or error should be raised + sweeptable = SweepTable.__new__(SweepTable) + sweeptable._in_construct_mode = True + sweeptable.__init__(name='sweep_table') + sweeptable._in_construct_mode = False + return sweeptable def addContainer(self, nwbfile): """ Add the test SweepTable, PatchClampSeries, IntracellularElectrode, and Device to the given NWBFile """ + # NOTE - if the SweepTable creation error has already been bypassed so that self.container is an + # instance of SweepTable, then adding sweep_table to the NWBFile in this way will not trigger an + # error nwbfile.sweep_table = self.container 
nwbfile.add_device(self.device) nwbfile.add_icephys_electrode(self.elec) @@ -250,18 +252,12 @@ def getContainer(self, nwbfile): return nwbfile.sweep_table def roundtripContainer(self, cache_spec=False): - # catch the DeprecationWarning raised when reading the SweepTable object from file - with self.assertWarnsWith(DeprecationWarning, - "Use of SweepTable is deprecated. Use the IntracellularRecordingsTable instead. " - "See also the NWBFile.add_intracellular_recordings function."): - return super().roundtripContainer(cache_spec) + # no warning or error should be raised when reading the SweepTable object from file + return super().roundtripContainer(cache_spec) def roundtripExportContainer(self, cache_spec=False): - # catch the DeprecationWarning raised when reading the SweepTable object from file - with self.assertWarnsWith(DeprecationWarning, - "Use of SweepTable is deprecated. Use the IntracellularRecordingsTable instead. " - "See also the NWBFile.add_intracellular_recordings function."): - return super().roundtripExportContainer(cache_spec) + # no warning or error should be raised when reading the SweepTable object from file + return super().roundtripExportContainer(cache_spec) def test_container(self): """ Test properties of the SweepTable read from file """ diff --git a/tests/unit/test_base.py b/tests/unit/test_base.py index 5af4986ac..a1afe1d15 100644 --- a/tests/unit/test_base.py +++ b/tests/unit/test_base.py @@ -45,21 +45,19 @@ def test_add_data_interface(self): def test_deprecated_add_data_interface(self): ts = self._create_time_series() - with self.assertWarnsWith( - PendingDeprecationWarning, "add_data_interface will be replaced by add" + msg = 'add_data_interface is deprecated and will be removed in PyNWB 4.0. Use add instead.' 
+ with self.assertWarnsWith(warn_type=DeprecationWarning, + exc_msg=msg ): self.pm.add_data_interface(ts) - self.assertIn(ts.name, self.pm.containers) - self.assertIs(ts, self.pm.containers[ts.name]) def test_deprecated_add_container(self): ts = self._create_time_series() - with self.assertWarnsWith( - PendingDeprecationWarning, "add_container will be replaced by add" + msg = 'add_container is deprecated and will be removed in PyNWB 4.0. Use add instead.' + with self.assertWarnsWith(warn_type=DeprecationWarning, + exc_msg=msg ): self.pm.add_container(ts) - self.assertIn(ts.name, self.pm.containers) - self.assertIs(ts, self.pm.containers[ts.name]) def test_get_data_interface(self): """Test adding a data interface to a ProcessingModule and retrieving it using get(...).""" @@ -72,20 +70,22 @@ def test_get_data_interface(self): def test_deprecated_get_data_interface(self): ts = self._create_time_series() self.pm.add(ts) - with self.assertWarnsWith( - PendingDeprecationWarning, "get_data_interface will be replaced by get" + msg = 'get_data_interface is deprecated and will be removed in PyNWB 4.0. Use get instead.' + with self.assertWarnsWith(warn_type=DeprecationWarning, + exc_msg=msg ): tmp = self.pm.get_data_interface("test_ts") - self.assertIs(tmp, ts) + self.assertIs(tmp, ts) def test_deprecated_get_container(self): ts = self._create_time_series() self.pm.add(ts) - with self.assertWarnsWith( - PendingDeprecationWarning, "get_container will be replaced by get" + msg = 'get_container is deprecated and will be removed in PyNWB 4.0. Use get instead.' 
+ with self.assertWarnsWith(warn_type=DeprecationWarning, + exc_msg=msg ): tmp = self.pm.get_container("test_ts") - self.assertIs(tmp, ts) + self.assertIs(tmp, ts) def test_getitem(self): """Test adding a data interface to a ProcessingModule and retrieving it using __getitem__(...).""" diff --git a/tests/unit/test_ecephys.py b/tests/unit/test_ecephys.py index d70953c12..362e0910a 100644 --- a/tests/unit/test_ecephys.py +++ b/tests/unit/test_ecephys.py @@ -290,8 +290,19 @@ def test_init(self): num = [3, 4] peak_over_rms = [5.3, 6.3] - with self.assertWarnsWith(DeprecationWarning, 'use pynwb.misc.Units or NWBFile.units instead'): - cc = Clustering(description='description', num=num, peak_over_rms=peak_over_rms, times=times) + error_msg = "The Clustering neurodata type is deprecated. Use pynwb.misc.Units or NWBFile.units instead" + kwargs = dict(description='description', + num=num, + peak_over_rms=peak_over_rms, + times=times) + with self.assertRaisesWith(ValueError, error_msg): + cc = Clustering(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # no error or warning should be raised + cc = Clustering.__new__(Clustering, in_construct_mode=True) + cc.__init__(**kwargs) + self.assertEqual(cc.description, 'description') self.assertEqual(cc.num, num) self.assertEqual(cc.peak_over_rms, peak_over_rms) @@ -304,19 +315,28 @@ def test_init(self): times = [1.3, 2.3] num = [3, 4] peak_over_rms = [5.3, 6.3] - with self.assertWarnsWith(DeprecationWarning, 'use pynwb.misc.Units or NWBFile.units instead'): - cc = Clustering(description='description', num=num, peak_over_rms=peak_over_rms, times=times) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + cc = Clustering.__new__(Clustering, + container_source=None, + parent=None, + in_construct_mode=True) + cc.__init__('description', num, peak_over_rms, times) means = [[7.3, 7.3]] stdevs = [[8.3, 8.3]] + error_msg = "The ClusterWaveforms neurodata 
type is deprecated. Use pynwb.misc.Units or NWBFile.units instead" + with self.assertRaisesWith(ValueError, error_msg): + cw = ClusterWaveforms(cc, 'filtering', means, stdevs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # no error or warning should be raised + cw = ClusterWaveforms.__new__(ClusterWaveforms, + container_source=None, + parent=None, + in_construct_mode=True) + cw.__init__(cc, 'filtering', means, stdevs) - with self.assertWarnsWith(DeprecationWarning, 'use pynwb.misc.Units or NWBFile.units instead'): - cw = ClusterWaveforms( - clustering_interface=cc, - waveform_filtering='filtering', - waveform_mean=means, - waveform_sd=stdevs - ) self.assertEqual(cw.clustering_interface, cc) self.assertEqual(cw.waveform_filtering, 'filtering') self.assertEqual(cw.waveform_mean, means) diff --git a/tests/unit/test_file.py b/tests/unit/test_file.py index 9465a6f23..c76abd7f5 100644 --- a/tests/unit/test_file.py +++ b/tests/unit/test_file.py @@ -126,12 +126,13 @@ def test_access_group_after_io(self): remove_test_file("electrodes_mwe.nwb") - def test_access_processing(self): + def test_access_processing_with_modules(self): self.nwbfile.create_processing_module('test_mod', 'test_description') - # test deprecate .modules - with self.assertWarnsWith(DeprecationWarning, 'NWBFile.modules has been replaced by NWBFile.processing.'): - modules = self.nwbfile.modules['test_mod'] - self.assertIs(self.nwbfile.processing['test_mod'], modules) + + # create object with deprecated argument + msg = "'NWBFile' object has no attribute 'modules'" + with self.assertRaisesWith(AttributeError, msg): + self.nwbfile.modules['test_mod'] def test_epoch_tags(self): tags1 = ['t1', 't2'] @@ -175,11 +176,8 @@ def test_add_stimulus(self): def test_add_stimulus_timeseries_arg(self): """Test nwbfile.add_stimulus using the deprecated 'timeseries' keyword argument""" - msg = ( - "The 'timeseries' keyword argument is deprecated and will be removed in PyNWB 
3.0. " - "Use the 'stimulus' argument instead." - ) - with self.assertWarnsWith(DeprecationWarning, msg): + msg = ("NWBFile.add_stimulus: missing argument 'stimulus', unrecognized argument: 'timeseries'") + with self.assertRaisesWith(TypeError, msg): self.nwbfile.add_stimulus( timeseries=TimeSeries( name='test_ts', @@ -188,17 +186,12 @@ def test_add_stimulus_timeseries_arg(self): timestamps=[0.0, 0.1, 0.2, 0.3, 0.4, 0.5] ) ) - self.assertEqual(len(self.nwbfile.stimulus), 1) def test_add_stimulus_no_stimulus_arg(self): """Test nwbfile.add_stimulus using the deprecated 'timeseries' keyword argument""" - msg = ( - "The 'stimulus' keyword argument is required. The 'timeseries' keyword argument can be " - "provided for backwards compatibility but is deprecated in favor of 'stimulus' and will be " - "removed in PyNWB 3.0." - ) - with self.assertRaisesWith(ValueError, msg): - self.nwbfile.add_stimulus(None) + msg = ("NWBFile.add_stimulus: missing argument 'stimulus'") + with self.assertRaisesWith(TypeError, msg): + self.nwbfile.add_stimulus() self.assertEqual(len(self.nwbfile.stimulus), 0) def test_add_stimulus_dynamic_table(self): @@ -542,6 +535,21 @@ def test_multi_publications(self): related_publications=('pub1', 'pub2')) self.assertTupleEqual(self.nwbfile.related_publications, ('pub1', 'pub2')) + def test_ec_electrodes_deprecation(self): + nwbfile = NWBFile('a', 'b', datetime.now(tzlocal())) + device = nwbfile.create_device('a') + elecgrp = nwbfile.create_electrode_group('name', 'desc', device=device, location='a') + nwbfile.add_electrode(location='loc1', group=elecgrp, id=0) + + # test that NWBFile.ec_electrodes property warns or errors + msg = "'NWBFile' object has no attribute 'ec_electrodes'" + with self.assertRaisesWith(AttributeError, msg): + nwbfile.ec_electrodes + + # test that NWBFile.ec_electrode_groups warns or errors + msg = "'NWBFile' object has no attribute 'ec_electrode_groups'" + with self.assertRaisesWith(AttributeError, msg): + 
nwbfile.ec_electrode_groups class SubjectTest(TestCase): def setUp(self): diff --git a/tests/unit/test_icephys.py b/tests/unit/test_icephys.py index 642aa5411..78cf3eb50 100644 --- a/tests/unit/test_icephys.py +++ b/tests/unit/test_icephys.py @@ -39,26 +39,31 @@ class NWBFileICEphys(TestCase): def setUp(self): self.icephys_electrode = GetElectrode() - def test_sweep_table_depractation_warn(self): - msg = ("Use of SweepTable is deprecated. Use the IntracellularRecordingsTable " - "instead. See also the NWBFile.add_intracellular_recordings function.") - with self.assertWarnsWith(DeprecationWarning, msg): - _ = NWBFile( - session_description='NWBFile icephys test', - identifier='NWB123', # required - session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()), - ic_electrodes=[self.icephys_electrode, ], - sweep_table=SweepTable()) - - def test_ic_electrodes_parameter_deprecation(self): - # Make sure we warn when using the ic_electrodes parameter on NWBFile - msg = "Use of the ic_electrodes parameter is deprecated. Use the icephys_electrodes parameter instead" - with self.assertWarnsWith(DeprecationWarning, msg): - _ = NWBFile( - session_description='NWBFile icephys test', + def test_sweep_table_deprecation_warn(self): + msg = ("SweepTable is deprecated. Use the IntracellularRecordingsTable instead. 
" + "See also the NWBFile.add_intracellular_recordings function.") + + with self.assertRaisesWith(ValueError, msg): + SweepTable() + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # should not raise error or warning + sweepT = SweepTable.__new__(SweepTable, in_construct_mode=True) + sweepT.__init__() + + kwargs = dict(session_description='NWBFile icephys test', identifier='NWB123', # required session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()), - ic_electrodes=[self.icephys_electrode, ]) + icephys_electrodes=[self.icephys_electrode, ], + sweep_table=sweepT) + + with self.assertRaisesWith(ValueError, msg): + NWBFile(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # should not warn or error + nwbfile = NWBFile.__new__(NWBFile, in_construct_mode=True) + nwbfile.__init__(**kwargs) def test_icephys_electrodes_parameter(self): nwbfile = NWBFile( @@ -68,53 +73,18 @@ def test_icephys_electrodes_parameter(self): icephys_electrodes=[self.icephys_electrode, ]) self.assertEqual(nwbfile.get_icephys_electrode('test_iS'), self.icephys_electrode) - def test_add_ic_electrode_deprecation(self): - # Make sure we warn when using the add_ic_electrodes parameter on NWBFile - nwbfile = NWBFile( - session_description='NWBFile icephys test', - identifier='NWB123', # required - session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal())) - - msg = "NWBFile.add_ic_electrode has been replaced by NWBFile.add_icephys_electrode." 
- with self.assertWarnsWith(DeprecationWarning, msg): - nwbfile.add_ic_electrode(self.icephys_electrode) - def test_ic_electrodes_attribute_deprecation(self): nwbfile = NWBFile( session_description='NWBFile icephys test', identifier='NWB123', # required session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal()), icephys_electrodes=[self.icephys_electrode, ]) - # make sure NWBFile.ic_electrodes property warns - - msg = "NWBFile.ic_electrodes has been replaced by NWBFile.icephys_electrodes." - with self.assertWarnsWith(DeprecationWarning, msg): - nwbfile.ic_electrodes # make sure NWBFile.get_ic_electrode warns - msg = "NWBFile.get_ic_electrode has been replaced by NWBFile.get_icephys_electrode." - with self.assertWarnsWith(DeprecationWarning, msg): + msg = "'NWBFile' object has no attribute 'get_ic_electrode'" + with self.assertRaisesWith(AttributeError, msg): nwbfile.get_ic_electrode(self.icephys_electrode.name) - def test_create_ic_electrode_deprecation(self): - nwbfile = NWBFile( - session_description='NWBFile icephys test', - identifier='NWB123', # required - session_start_time=datetime(2017, 4, 3, 11, tzinfo=tzlocal())) - device = Device(name='device_name') - msg = "NWBFile.create_ic_electrode has been replaced by NWBFile.create_icephys_electrode." 
- with self.assertWarnsWith(DeprecationWarning, msg): - nwbfile.create_ic_electrode( - name='test_iS', - device=device, - description='description', - slice='slice', - seal='seal', - location='location', - resistance='resistance', - filtering='filtering', - initial_access_resistance='initial_access_resistance') - class IntracellularElectrodeConstructor(TestCase): diff --git a/tests/unit/test_icephys_metadata_tables.py b/tests/unit/test_icephys_metadata_tables.py index b6ccf4c90..705c91f5d 100644 --- a/tests/unit/test_icephys_metadata_tables.py +++ b/tests/unit/test_icephys_metadata_tables.py @@ -1102,15 +1102,28 @@ def test_deprecate_simultaneous_recordings_on_add_stimulus(self): electrode = self.__add_electrode(nwbfile, device) stimulus = self.__get_stimulus(electrode=electrode) response = self.__get_response(electrode=electrode) - # Make sure we warn if sweeptable is added on add_stimulus - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") # Trigger all warnings + stimulus2 = VoltageClampStimulusSeries( + name="ccss2", + data=[1, 2, 3, 4, 5], + starting_time=123.6, + rate=10e3, + electrode=electrode, + gain=0.02, + sweep_number=np.uint64(16) + ) + + msg = ("SweepTable is deprecated. Use the IntracellularRecordingsTable instead. 
" + "See also the NWBFile.add_intracellular_recordings function.") + with self.assertRaisesWith(ValueError, msg): nwbfile.add_stimulus(stimulus, use_sweep_table=True) - self.assertEqual(len(w), 1) - assert issubclass(w[-1].category, DeprecationWarning) - # make sure we don't trigger the same deprecation warning twice - nwbfile.add_acquisition(response, use_sweep_table=True) - self.assertEqual(len(w), 1) + + # test that adding a stimulus does not error when in construct mode + nwbfile._in_construct_mode = True + nwbfile.add_stimulus(stimulus2, use_sweep_table=True) + + # make sure we don't trigger the same deprecation warning twice + nwbfile.add_acquisition(response, use_sweep_table=True) + nwbfile._in_construct_mode = False def test_deprecate_sweeptable_on_add_stimulus_template(self): """ @@ -1140,16 +1153,18 @@ def test_deprecate_sweeptable_on_add_stimulus_template(self): gain=0.02, sweep_number=np.uint64(15) ) - with warnings.catch_warnings(record=True) as w: + + msg = ("SweepTable is deprecated. Use the IntracellularRecordingsTable instead. " + "See also the NWBFile.add_intracellular_recordings function.") + with self.assertRaisesWith(ValueError, msg): nwbfile.add_stimulus_template(local_stimulus, use_sweep_table=True) - self.assertEqual(len(w), 1) - assert issubclass(w[-1].category, DeprecationWarning) - self.assertEqual(str(w[-1].message), - "Use of SweepTable is deprecated. Use the IntracellularRecordingsTable " - "instead. 
See also the NWBFile.add_intracellular_recordings function.") - # make sure we don't trigger the same deprecation warning twice - nwbfile.add_stimulus_template(local_stimulus2, use_sweep_table=True) - self.assertEqual(len(w), 1) + # NOTE - the sweep table creation will error but the stimulus template will still be added + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # should not trigger any warnings or errors + nwbfile._in_construct_mode = True + nwbfile.add_stimulus_template(local_stimulus2, use_sweep_table=True) + nwbfile._in_construct_mode = False def test_deprecate_sweepstable_on_add_acquistion(self): """ @@ -1160,49 +1175,84 @@ def test_deprecate_sweepstable_on_add_acquistion(self): electrode = self.__add_electrode(nwbfile, device) stimulus = self.__get_stimulus(electrode=electrode) response = self.__get_response(electrode=electrode) - # Make sure we warn if sweeptable is added on add_stimulus - with warnings.catch_warnings(record=True) as w: + response2 = VoltageClampSeries( + name='vcs2', + data=[0.1, 0.2, 0.3, 0.4, 0.5], + conversion=1e-12, + resolution=np.nan, + starting_time=123.6, + rate=20e3, + electrode=electrode, + gain=0.02, + capacitance_slow=100e-12, + resistance_comp_correction=70.0, + sweep_number=np.uint64(16) + ) + + msg = ("SweepTable is deprecated. Use the IntracellularRecordingsTable instead. " + "See also the NWBFile.add_intracellular_recordings function.") + + # check we raise an error when using the sweeptable with add_acquisition + with self.assertRaisesWith(ValueError, msg): nwbfile.add_acquisition(response, use_sweep_table=True) - self.assertEqual(len(w), 1) - assert issubclass(w[-1].category, DeprecationWarning) - self.assertEqual(str(w[-1].message), - "Use of SweepTable is deprecated. Use the IntracellularRecordingsTable " - "instead. 
See also the NWBFile.add_intracellular_recordings function.") - # make sure we don't trigger the same deprecation warning twice - nwbfile.add_stimulus(stimulus, use_sweep_table=True) - self.assertEqual(len(w), 1) + # NOTE - the sweep table creation will error but the acquisition will still be added + + # should not trigger error or warning in construct mode + nwbfile._in_construct_mode = True + nwbfile.add_acquisition(response2, use_sweep_table=True) + + # make sure we don't trigger the same deprecation warning twice + nwbfile.add_stimulus(stimulus, use_sweep_table=True) + nwbfile._in_construct_mode = False + def test_deprecate_sweepstable_on_init(self): """ Test that warnings are raised if the user tries to use a sweeps table """ from pynwb.icephys import SweepTable - with warnings.catch_warnings(record=True) as w: - nwbfile = NWBFile( - session_description='my first synthetic recording', + sweepT = SweepTable.__new__(SweepTable, in_construct_mode=True) + sweepT.__init__() + + kwargs = dict(session_description='my first synthetic recording', identifier='EXAMPLE_ID', session_start_time=datetime.now(tzlocal()), - sweep_table=SweepTable() - ) - device = self.__add_device(nwbfile) - electrode = self.__add_electrode(nwbfile, device) - stimulus = self.__get_stimulus(electrode=electrode) - self.assertEqual(len(w), 1) - assert issubclass(w[-1].category, DeprecationWarning) - # make sure we don't trigger the same deprecation warning twice - nwbfile.add_stimulus(stimulus, use_sweep_table=True) - self.assertEqual(len(w), 1) + sweep_table=sweepT) + + # check we raise an error when using the sweeptable argument on init + msg = ("SweepTable is deprecated. Use the IntracellularRecordingsTable instead. 
" + "See also the NWBFile.add_intracellular_recordings function.") + with self.assertRaisesWith(ValueError, msg): + nwbfile = NWBFile(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # should not trigger warning or error + nwbfile = NWBFile.__new__(NWBFile, in_construct_mode=True) + nwbfile.__init__(**kwargs) + + # make sure we don't trigger the same deprecation warning twice + device = self.__add_device(nwbfile) + electrode = self.__add_electrode(nwbfile, device) + stimulus = self.__get_stimulus(electrode=electrode) + nwbfile.add_stimulus(stimulus, use_sweep_table=True) - def test_deprectation_icephys_filtering_on_init(self): - with warnings.catch_warnings(record=True) as w: - nwbfile = NWBFile( - session_description='my first synthetic recording', + def test_deprecation_icephys_filtering_on_init(self): + kwargs = dict(session_description='my first synthetic recording', identifier='EXAMPLE_ID', session_start_time=datetime.now(tzlocal()), - icephys_filtering='test filtering' - ) - assert issubclass(w[-1].category, DeprecationWarning) - self.assertEqual(nwbfile.icephys_filtering, 'test filtering') + icephys_filtering='test filtering') + msg = ("Use of icephys_filtering is deprecated and will be removed in PyNWB 4.0. 
" + "Use the IntracellularElectrode.filtering field instead") + + with self.assertRaisesWith(ValueError, msg): + nwbfile = NWBFile(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + nwbfile = NWBFile.__new__(NWBFile, in_construct_mode=True) + with self.assertWarnsWith(warn_type=UserWarning, exc_msg=msg): + nwbfile.__init__(**kwargs) + + self.assertEqual(nwbfile.icephys_filtering, 'test filtering') def test_icephys_filtering_roundtrip(self): # create the base file @@ -1212,18 +1262,24 @@ def test_icephys_filtering_roundtrip(self): session_start_time=datetime.now(tzlocal()) ) # set the icephys_filtering attribute and make sure we get a deprecation warning - with warnings.catch_warnings(record=True) as w: + msg = ("Use of icephys_filtering is deprecated and will be removed in PyNWB 4.0. " + "Use the IntracellularElectrode.filtering field instead") + with self.assertRaisesWith(ValueError, msg): + nwbfile.icephys_filtering = 'test filtering' + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + nwbfile._in_construct_mode = True + with self.assertWarnsWith(warn_type=UserWarning, exc_msg=msg): nwbfile.icephys_filtering = 'test filtering' - assert issubclass(w[-1].category, DeprecationWarning) - # write the test fil + nwbfile._in_construct_mode = False + + # write the test file with NWBHDF5IO(self.path, 'w') as io: io.write(nwbfile) # read the test file and confirm icephys_filtering has been written with NWBHDF5IO(self.path, 'r') as io: - with warnings.catch_warnings(record=True) as w: + with self.assertWarnsWith(UserWarning, msg): infile = io.read() - self.assertEqual(len(w), 1) # make sure a warning is being raised - assert issubclass(w[0].category, DeprecationWarning) # make sure it is a deprecation warning self.assertEqual(infile.icephys_filtering, 'test filtering') # make sure the value is set def test_get_icephys_meta_parent_table(self): diff --git a/tests/unit/test_image.py 
b/tests/unit/test_image.py index e41475f8f..11a2b7a8d 100644 --- a/tests/unit/test_image.py +++ b/tests/unit/test_image.py @@ -154,7 +154,7 @@ def test_external_file_with_incorrect_starting_frame_construct_mode(self): "ImageSeries 'test_iS': The number of frame indices in " "'starting_frame' should have the same length as 'external_file'." ) - # Create the image series in construct mode, modelling the behavior + # Create the image series in construct mode, modeling the behavior # of the ObjectMapper on read while avoiding having to create, write, # and read and entire NWB file obj = ImageSeries.__new__(ImageSeries, @@ -243,17 +243,23 @@ def test_external_file_default_format(self): """Test that format is set to 'external' if not provided, when external_file is provided.""" msg = ( "ImageSeries 'test_iS': The value for 'format' has been changed to 'external'. " - "Setting a default value for 'format' is deprecated and will be changed to " - "raising a ValueError in the next major release." + "If an external file is detected, setting a value for " + "'format' other than 'external' is deprecated." 
) - with self.assertWarnsWith(DeprecationWarning, msg): - iS = ImageSeries( - name="test_iS", + kwargs = dict(name="test_iS", external_file=["external_file", "external_file2"], unit="n.a.", starting_frame=[0, 10], - rate=0.2, - ) + rate=0.2,) + + # create object with deprecated argument + with self.assertRaisesWith(ValueError, msg): + ImageSeries(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + iS = ImageSeries.__new__(ImageSeries, in_construct_mode=True) + with self.assertWarnsWith(warn_type=UserWarning, exc_msg=msg): + iS.__init__(**kwargs) self.assertEqual(iS.format, "external") def test_external_file_with_correct_format(self): @@ -309,6 +315,23 @@ def test_external_file_with_data_construct_mode(self): rate=0.2, ) + def test_bits_per_pixel_deprecation(self): + msg = "bits_per_pixel is deprecated" + kwargs = dict(name='test_iS', + unit='unit', + external_file=['external_file'], + starting_frame=[0], + format='external', + timestamps=[1., 2., 3.], + bits_per_pixel=8) + with self.assertRaisesWith(ValueError, msg): + ImageSeries(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # no warning or error should be raised + iS = ImageSeries.__new__(ImageSeries, in_construct_mode=True) + iS.__init__(**kwargs) + class IndexSeriesConstructor(TestCase): @@ -329,23 +352,29 @@ def test_init(self): self.assertEqual(iS.unit, 'N/A') self.assertIs(iS.indexed_images, images) - def test_init_bad_unit(self): + def test_init_bad_unit(self): ts = TimeSeries( name='test_ts', data=[1, 2, 3], unit='unit', timestamps=[0.1, 0.2, 0.3] ) - msg = ("The 'unit' field of IndexSeries is fixed to the value 'N/A' starting in NWB 2.5. 
Passing " - "a different value for 'unit' will raise an error in PyNWB 3.0.") - with self.assertWarnsWith(PendingDeprecationWarning, msg): - iS = IndexSeries( - name='test_iS', + msg = ("The 'unit' field of IndexSeries is fixed to the value 'N/A'.") + kwargs = dict(name='test_iS', data=[1, 2, 3], unit='na', indexed_timeseries=ts, - timestamps=[0.1, 0.2, 0.3] - ) + timestamps=[0.1, 0.2, 0.3]) + + # create object with deprecated argument + with self.assertRaisesWith(ValueError, msg): + IndexSeries(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # no warning or error should be raised + iS = IndexSeries.__new__(IndexSeries, in_construct_mode=True) + iS.__init__(**kwargs) + self.assertEqual(iS.unit, 'N/A') def test_init_indexed_ts(self): @@ -355,18 +384,31 @@ def test_init_indexed_ts(self): unit='unit', timestamps=[0.1, 0.2, 0.3] ) - msg = ("The indexed_timeseries field of IndexSeries is discouraged and will be deprecated in " - "a future version of NWB. Use the indexed_images field instead.") - with self.assertWarnsWith(PendingDeprecationWarning, msg): - iS = IndexSeries( - name='test_iS', + msg = ("The indexed_timeseries field of IndexSeries is deprecated. 
" + "Use the indexed_images field instead.") + kwargs = dict(name='test_iS', data=[1, 2, 3], unit='N/A', indexed_timeseries=ts, - timestamps=[0.1, 0.2, 0.3] - ) + timestamps=[0.1, 0.2, 0.3]) + + # create object with deprecated argument + with self.assertRaisesWith(ValueError, msg): + IndexSeries(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # no warning or error should be raised + iS = IndexSeries.__new__(IndexSeries, in_construct_mode=True) + iS.__init__(**kwargs) + self.assertIs(iS.indexed_timeseries, ts) + def test_init_no_indexed_ts_or_timeseries(self): + msg = ("Either indexed_timeseries or indexed_images " + "must be provided when creating an IndexSeries.") + with self.assertRaisesWith(ValueError, msg): + IndexSeries(name='test_iS', data=[1, 2, 3], unit='N/A', timestamps=[0.1, 0.2, 0.3]) + class ImageMaskSeriesConstructor(TestCase): diff --git a/tests/unit/test_ophys.py b/tests/unit/test_ophys.py index 88bd24535..d484d9e01 100644 --- a/tests/unit/test_ophys.py +++ b/tests/unit/test_ophys.py @@ -131,11 +131,8 @@ def test_init(self): def test_manifold_deprecated(self): oc, device = self.set_up_dependencies() - msg = "The 'manifold' argument is deprecated in favor of 'origin_coords' and 'grid_spacing'." 
- with self.assertWarnsWith(DeprecationWarning, msg): - ImagingPlane( - name='test_imaging_plane', + kwargs = dict(name='test_imaging_plane', optical_channel=oc, description='description', device=device, @@ -143,16 +140,21 @@ def test_manifold_deprecated(self): imaging_rate=300., indicator='indicator', location='location', - manifold=(1, 1, (2, 2, 2)) - ) + manifold=(1, 1, (2, 2, 2))) + + # create object with deprecated argument + with self.assertRaisesWith(ValueError, msg): + ImagingPlane(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # no warning or error should be raised + obj = ImagingPlane.__new__(ImagingPlane, in_construct_mode=True) + obj.__init__(**kwargs) def test_conversion_deprecated(self): oc, device = self.set_up_dependencies() - msg = "The 'conversion' argument is deprecated in favor of 'origin_coords' and 'grid_spacing'." - with self.assertWarnsWith(DeprecationWarning, msg): - ImagingPlane( - name='test_imaging_plane', + kwargs = dict(name='test_imaging_plane', optical_channel=oc, description='description', device=device, @@ -160,16 +162,21 @@ def test_conversion_deprecated(self): imaging_rate=300., indicator='indicator', location='location', - conversion=2.0 - ) + conversion=2.0) + + # create object with deprecated argument + with self.assertRaisesWith(ValueError, msg): + ImagingPlane(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # no warning or error should be raised + obj = ImagingPlane.__new__(ImagingPlane, in_construct_mode=True) + obj.__init__(**kwargs) def test_unit_deprecated(self): oc, device = self.set_up_dependencies() - msg = "The 'unit' argument is deprecated in favor of 'origin_coords_unit' and 'grid_spacing_unit'." 
- with self.assertWarnsWith(DeprecationWarning, msg): - ImagingPlane( - name='test_imaging_plane', + kwargs = dict(name='test_imaging_plane', optical_channel=oc, description='description', device=device, @@ -178,8 +185,16 @@ def test_unit_deprecated(self): indicator='indicator', location='location', reference_frame='reference_frame', - unit='my_unit' - ) + unit='my_unit') + + # create object with deprecated argument + with self.assertRaisesWith(ValueError, msg): + ImagingPlane(**kwargs) + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # no warning or error should be raised + obj = ImagingPlane.__new__(ImagingPlane, in_construct_mode=True) + obj.__init__(**kwargs) class OnePhotonSeriesConstructor(TestCase): diff --git a/tests/unit/test_scratch.py b/tests/unit/test_scratch.py index 398ae2f78..7794fdee4 100644 --- a/tests/unit/test_scratch.py +++ b/tests/unit/test_scratch.py @@ -28,6 +28,42 @@ def test_constructor_list(self): self.assertListEqual(sd.data, [1, 2, 3, 4]) self.assertEqual(sd.description, 'test scratch') + def test_scratch_notes_deprecation(self): + msg = ("The `notes` argument of ScratchData.__init__ has been deprecated and will " + "be removed in PyNWB 4.0. Use description instead.") + with self.assertRaisesWith(ValueError, msg): + ScratchData(name='test', data=[1, 2, 3, 4, 5], notes='test notes') + + # create object in construct mode, modeling the behavior of the ObjectMapper on read + # should not raise error or warning + data = ScratchData.__new__(ScratchData, in_construct_mode=True) + data.__init__(name='test', data=[1, 2, 3, 4, 5], notes='test notes') + self.assertEqual(data.description, 'test notes') + + # test notes property + msg = ("Use of ScratchData.notes has been deprecated and will be removed in PyNWB 4.0. 
" + "Use ScratchData.description instead.") + + with self.assertWarnsWith(DeprecationWarning, msg): + data.notes + + # test notes setter + data = ScratchData(name='test', data=[1, 2, 3, 4, 5], description='test description') + with self.assertRaisesWith(ValueError, msg): + data.notes = 'test notes' + + def test_scratch_no_description(self): + with self.assertRaisesWith(ValueError, 'ScratchData.description is required.'): + ScratchData(name='test', data=[1, 2, 3, 4, 5]) + + def test_scratch_notes_and_description(self): + msg = ('Cannot provide both notes and description to ScratchData.__init__. The description ' + 'argument is recommended.') + data = ScratchData.__new__(ScratchData, in_construct_mode=True) + with self.assertRaisesWith(ValueError, msg): + data.__init__(name='test', data=[1, 2, 3, 4, 5], notes='test notes', + description='test description') + def test_add_scratch_int(self): ret = self.nwbfile.add_scratch(2, name='test', description='test data') self.assertIsInstance(ret, ScratchData) @@ -70,6 +106,14 @@ def test_add_scratch_dataframe_no_description(self): with self.assertRaisesWith(ValueError, msg): self.nwbfile.add_scratch(data, name='test') + def test_add_scratch_notes_and_description(self): + error_msg = 'Cannot call add_scratch with (notes or table_description) and description' + warning_msg = ("Use of the `notes` or `table_description` argument is deprecated and will be " + "removed in PyNWB 4.0. 
Use the `description` argument instead.") + with self.assertWarnsWith(DeprecationWarning, warning_msg): + with self.assertRaisesWith(ValueError, error_msg): + self.nwbfile.add_scratch([1, 2, 3, 4], name='test', description='test data', notes='test notes') + def test_add_scratch_container(self): data = TimeSeries(name='test_ts', data=[1, 2, 3, 4, 5], unit='unit', timestamps=[1.1, 1.2, 1.3, 1.4, 1.5]) self.nwbfile.add_scratch(data) @@ -106,6 +150,22 @@ def test_add_scratch_dynamictable(self): self.nwbfile.add_scratch(data) self.assertIs(self.nwbfile.get_scratch('test', convert=False), data) self.assertIs(self.nwbfile.scratch['test'], data) + + def test_add_scratch_notes_deprecation(self): + msg = ("Use of the `notes` or `table_description` argument is deprecated and will be removed in PyNWB 4.0. " + "Use the `description` argument instead.") + with self.assertWarnsWith(DeprecationWarning, msg): + self.nwbfile.add_scratch(name='test', data=[1, 2, 3, 4, 5], notes='test notes') + self.assertEqual(self.nwbfile.scratch['test'].description, 'test notes') + + def test_add_scratch_table_description_deprecation(self): + msg = ('Use of the `notes` or `table_description` argument is deprecated and will be removed in PyNWB 4.0. 
' + 'Use the `description` argument instead.') + df = pd.DataFrame(data={'col1': [1, 2, 3, 4], 'col2': ['a', 'b', 'c', 'd']}) + with self.assertWarnsWith(DeprecationWarning, msg): + self.nwbfile.add_scratch(name='test', data=df, + table_description='test table_description') + self.assertEqual(self.nwbfile.scratch['test'].description, 'test table_description') def test_get_scratch_list_convert_false(self): self.nwbfile.add_scratch([1, 2, 3, 4], name='test', description='test description') From f02e61b4bad762703ea443c363f172a16d3bac52 Mon Sep 17 00:00:00 2001 From: Steph Prince <40640337+stephprince@users.noreply.github.com> Date: Fri, 3 Jan 2025 12:02:06 -0500 Subject: [PATCH 5/8] Upgrade validation methods (#1911) * reconcile io path validator differences * make io and path output the same * update tests for io validation * add test that io and path output is same * update tests for io output with status * add validate helper function and fix status tracking * update arguments and status checks * fix test comparing io and path outputs * update error message for io inputs * add tests to compare all io and path outputs * remove script for testing * add initial json export option * update validator inputs and outputs, remove hdf5io references * update tests for non status output * add tests for json validator output * move pynwb.validate into validation module * update json report * add validation entry point * separate cli and validation function files * update validation tutorial * move get_backend to pynwb init * update example validation for new io behavior * update ruff ignores * fix test comments * update CHANGELOG * add tests for _get_backend * update backend imports for optional zarr * fix test name * update test filename * close io after validation * fix test assertion * add condition for ros3 validation * Update CHANGELOG.md * Apply suggestions from code review Co-authored-by: Ryan Ly * update cli args and docs * fix formatting * update json-file-path 
argname * update extension tutorial * Apply suggestions from code review Co-authored-by: Ryan Ly * warn for positional args in validation --------- Co-authored-by: Ryan Ly --- CHANGELOG.md | 10 + docs/gallery/general/extensions.py | 5 +- docs/source/validation.rst | 28 ++- pyproject.toml | 5 +- src/pynwb/__init__.py | 23 ++- src/pynwb/testing/testh5io.py | 8 +- src/pynwb/validate.py | 252 ----------------------- src/pynwb/validation.py | 198 ++++++++++++++++++ src/pynwb/validation_cli.py | 84 ++++++++ test.py | 14 +- tests/integration/ros3/test_ros3.py | 7 +- tests/integration/utils/test_io_utils.py | 27 ++- tests/validation/test_validate.py | 146 +++++++------ 13 files changed, 465 insertions(+), 342 deletions(-) delete mode 100644 src/pynwb/validate.py create mode 100644 src/pynwb/validation.py create mode 100644 src/pynwb/validation_cli.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 503df39e6..b416b5025 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ ## PyNWB 3.0.0 (Upcoming) +### Breaking changes +- The validation methods have been updated with multiple breaking changes. @stephprince [#1911](https://github.com/NeurodataWithoutBorders/pynwb/pull/1911) + - The behavior of `pynwb.validate(io=...)` now matches the behavior of `pynwb.validate(path=...)`. In previous pynwb versions, `pynwb.validate(io=...)` did not use the cached namespaces during validation. To obtain the same behavior as in previous versions, you can update the function call to `pynwb.validate(io=..., use_cached_namespaces=False)` + - `pynwb.validate` will return only a list of validation errors instead of a tuple: (list of validation_errors, status code) + - The validate module has been renamed to `validation.py`. 
The validate method can be + imported using `import pynwb; pynwb.validate` or `from pynwb import validate` + ### Deprecations - The following deprecated classes will now raise errors when creating new instances of these classes: ``ClusteringWaveforms``, ``Clustering``, ``SweepTable``. Reading files using these data types will continue to be supported. - The following methods and arguments have been deprecated: @@ -30,6 +37,9 @@ - Added support for `model_number`, `model_name`, and `serial_number` fields to `Device`. @stephprince [#1997](https://github.com/NeurodataWithoutBorders/pynwb/pull/1997) - Deprecated `EventWaveform` neurodata type. @rly [#1940](https://github.com/NeurodataWithoutBorders/pynwb/pull/1940) - Deprecated `ImageMaskSeries` neurodata type. @rly [#1941](https://github.com/NeurodataWithoutBorders/pynwb/pull/1941) +- Added enhancements to the validation CLI. @stephprince [#1911](https://github.com/NeurodataWithoutBorders/pynwb/pull/1911) + - Added an entry point for the validation module. You can now use `pynwb-validate "file.nwb"`. + - Added the `--json-outpath-path` CLI argument to output validation results in a machine readable format. - Removed python 3.8 support, added python 3.13 support. @stephprince [#2007](https://github.com/NeurodataWithoutBorders/pynwb/pull/2007) - Added warnings when using positional arguments in `Container` constructor methods. Positional arguments will raise errors in the next major release. 
@stephprince [#1972](https://github.com/NeurodataWithoutBorders/pynwb/pull/1972) diff --git a/docs/gallery/general/extensions.py b/docs/gallery/general/extensions.py index ddf9159c7..3e41e7a91 100644 --- a/docs/gallery/general/extensions.py +++ b/docs/gallery/general/extensions.py @@ -37,8 +37,7 @@ ns_builder = NWBNamespaceBuilder( "Extension for use in my Lab", "mylab", version="0.1.0" ) - -ns_builder.include_type("ElectricalSeries", namespace="core") +ns_builder.include_namespace("core") ext = NWBGroupSpec( "A custom ElectricalSeries for my lab", @@ -264,7 +263,7 @@ def __init__(self, **kwargs): ext_source = name + ".extensions.yaml" ns_builder = NWBNamespaceBuilder(name + " extensions", name, version="0.1.0") -ns_builder.include_type("NWBDataInterface", namespace="core") +ns_builder.include_namespace("core") potato = NWBGroupSpec( neurodata_type_def="Potato", diff --git a/docs/source/validation.rst b/docs/source/validation.rst index 73c138127..6e3ac7d2d 100644 --- a/docs/source/validation.rst +++ b/docs/source/validation.rst @@ -3,12 +3,21 @@ Validating NWB files ==================== +.. note:: + + The pynwb validation CLI checks for structural compliance of NWB files with the NWB schema. + It is recommended to use the `NWBInspector CLI `_ + for more comprehensive validation of both structural compliance with the NWB schema and + compliance of data with NWB best practices. The NWBInspector runs both PyNWB validation as + described here and additional data checks. + + Validating NWB files is handled by a command-line tool available in :py:mod:`~pynwb`. The validator can be invoked like so: .. code-block:: bash - python -m pynwb.validate test.nwb + pynwb-validate test.nwb If the file contains no NWB extensions, then this command will validate the file ``test.nwb`` against the *core* NWB specification. On success, the output will be: @@ -29,7 +38,7 @@ within the ``test.nwb`` file. .. 
code-block:: bash - python -m pynwb.validate -n ndx-my-extension test.nwb + pynwb-validate -n ndx-my-extension test.nwb To validate against the version of the **core** NWB specification that is included with the installed version of PyNWB, use the ``--no-cached-namespace`` flag. This can be useful in validating files against newer or older versions @@ -37,27 +46,28 @@ of the **core** NWB specification that are installed with newer or older version .. code-block:: bash - python -m pynwb.validate --no-cached-namespace test.nwb + pynwb-validate --no-cached-namespace test.nwb .. Last updated 8/13/2021 .. code-block:: text - $python -m pynwb.validate --help - usage: validate.py [-h] [-n NS] [-lns] [--cached-namespace | --no-cached-namespace] paths [paths ...] + $pynwb-validate --help + usage: pynwb-validate [-h] [-lns] [-n NS] [--json-output-path JSON_OUTPUT_PATH] [--no-cached-namespace] paths [paths ...] Validate an NWB file positional arguments: paths NWB file paths - optional arguments: + options: -h, --help show this help message and exit - -n NS, --ns NS the namespace to validate against -lns, --list-namespaces List the available namespaces and exit. - --cached-namespace Use the cached namespace (default). + -n NS, --ns NS the namespace to validate against + --json-output-path JSON_OUTPUT_PATH + Write json output to this location. --no-cached-namespace - Don't use the cached namespace. + Use the namespaces installed by PyNWB (true) or use the cached namespaces (false; default). If --ns is not specified, validate against all namespaces in the NWB file. 
diff --git a/pyproject.toml b/pyproject.toml index 122fbd05b..755d3384e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,6 +48,9 @@ dynamic = ["version"] # the build backend will compute the version dynamically f "Homepage" = "https://github.com/NeurodataWithoutBorders/pynwb" "Bug Tracker" = "https://github.com/NeurodataWithoutBorders/pynwb/issues" +[project.scripts] +pynwb-validate = "pynwb.validation_cli:validation_cli" + [tool.hatch.version] source = "vcs" @@ -111,7 +114,7 @@ line-length = 120 "docs/gallery/*" = ["E402", "T201"] "src/*/__init__.py" = ["F401"] "src/pynwb/_version.py" = ["T201"] -"src/pynwb/validate.py" = ["T201"] +"src/pynwb/validation_cli.py" = ["T201"] "scripts/*" = ["T201"] # "test_gallery.py" = ["T201"] # Uncomment when test_gallery.py is created diff --git a/src/pynwb/__init__.py b/src/pynwb/__init__.py index 5f8fec157..409c28c34 100644 --- a/src/pynwb/__init__.py +++ b/src/pynwb/__init__.py @@ -23,7 +23,7 @@ CORE_NAMESPACE = 'core' from .spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace # noqa E402 -from .validate import validate # noqa: F401, E402 +from .validation import validate # noqa: F401, E402 try: # see https://effigies.gitlab.io/posts/python-packaging-2023/ @@ -344,6 +344,27 @@ def get_sum(self, a, b): return __TYPE_MAP.get_dt_container_cls(neurodata_type, namespace) +@docval({'name': 'path', 'type': str, 'doc': 'Path to the NWB file which can be an HDF5 file or a Zarr store.'}, + {"name": "method", "type": str, "doc": "the method to use when opening the file", 'default': None}, + is_method=False) +def _get_backend(path: str, method: str = None): + if method == "ros3": + return NWBHDF5IO # TODO - add additional conditions for other streaming methods + + try: + from hdmf_zarr import NWBZarrIO + backend_io_classes = [NWBHDF5IO, NWBZarrIO] + except ImportError: + backend_io_classes = [NWBHDF5IO] + + backend_options = [b for b in backend_io_classes if b.can_read(path=path)] + if len(backend_options) == 0: + raise 
ValueError(f"Could not find an IO to read the file '{path}'. If you are trying to read " + f"a Zarr file, make sure you have hdmf-zarr installed.") + else: + return backend_options[0] + + class NWBHDF5IO(_HDF5IO): @staticmethod diff --git a/src/pynwb/testing/testh5io.py b/src/pynwb/testing/testh5io.py index 7234e79f5..265cd5944 100644 --- a/src/pynwb/testing/testh5io.py +++ b/src/pynwb/testing/testh5io.py @@ -163,12 +163,12 @@ def getContainer(self, nwbfile): def validate(self): """ Validate the created files """ if os.path.exists(self.filename): - errors, _ = pynwb_validate(paths=[self.filename]) + errors = pynwb_validate(path=self.filename) if errors: raise Exception("\n".join(errors)) if os.path.exists(self.export_filename): - errors, _ = pynwb_validate(paths=[self.export_filename]) + errors = pynwb_validate(path=self.export_filename) if errors: raise Exception("\n".join(errors)) @@ -366,11 +366,11 @@ def roundtripExportContainer(self, cache_spec=True): def validate(self): """Validate the created files.""" if os.path.exists(self.filename): - errors, _ = pynwb_validate(paths=[self.filename]) + errors = pynwb_validate(path=self.filename) if errors: raise Exception("\n".join(errors)) if os.path.exists(self.export_filename): - errors, _ = pynwb_validate(paths=[self.export_filename]) + errors = pynwb_validate(path=self.export_filename) if errors: raise Exception("\n".join(errors)) diff --git a/src/pynwb/validate.py b/src/pynwb/validate.py deleted file mode 100644 index 880f860a6..000000000 --- a/src/pynwb/validate.py +++ /dev/null @@ -1,252 +0,0 @@ -"""Command line tool to Validate an NWB file against a namespace.""" -import sys -from argparse import ArgumentParser -from typing import Tuple, List, Dict, Optional - -from hdmf.spec import NamespaceCatalog -from hdmf.build import BuildManager -from hdmf.build import TypeMap as TypeMap -from hdmf.utils import docval, getargs -from hdmf.backends.io import HDMFIO -from hdmf.validate import ValidatorMap - -from pynwb import 
CORE_NAMESPACE -from pynwb.spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace - - -def _print_errors(validation_errors: list): - if validation_errors: - print(" - found the following errors:", file=sys.stderr) - for err in validation_errors: - print(str(err), file=sys.stderr) - else: - print(" - no errors found.") - - -def _validate_helper(io: HDMFIO, namespace: str = CORE_NAMESPACE) -> list: - builder = io.read_builder() - validator = ValidatorMap(io.manager.namespace_catalog.get_namespace(name=namespace)) - return validator.validate(builder) - - -def get_cached_namespaces_to_validate( - path: str, driver: Optional[str] = None, aws_region: Optional[str] = None, -) -> Tuple[List[str], BuildManager, Dict[str, str]]: - """ - Determine the most specific namespace(s) that are cached in the given NWBFile that can be used for validation. - - Example - ------- - The following example illustrates how we can use this function to validate against namespaces - cached in a file. This is useful, e.g., when a file was created using an extension - - .. code-block:: python - - from pynwb import validate - from pynwb.validate import get_cached_namespaces_to_validate - path = "my_nwb_file.nwb" - validate_namespaces, manager, cached_namespaces = get_cached_namespaces_to_validate(path) - with NWBHDF5IO(path, "r", manager=manager) as reader: - errors = [] - for ns in validate_namespaces: - errors += validate(io=reader, namespace=ns) - - :param path: Path for the NWB file - :return: Tuple with: - - List of strings with the most specific namespace(s) to use for validation. - - BuildManager object for opening the file for validation - - Dict with the full result from NWBHDF5IO.load_namespaces - """ - from . 
import NWBHDF5IO # TODO: modularize to avoid circular import - - catalog = NamespaceCatalog( - group_spec_cls=NWBGroupSpec, dataset_spec_cls=NWBDatasetSpec, spec_namespace_cls=NWBNamespace - ) - namespace_dependencies = NWBHDF5IO.load_namespaces( - namespace_catalog=catalog, - path=path, - driver=driver, - aws_region=aws_region - ) - - # Determine which namespaces are the most specific (i.e. extensions) and validate against those - candidate_namespaces = set(namespace_dependencies.keys()) - for namespace_dependency in namespace_dependencies: - candidate_namespaces -= namespace_dependencies[namespace_dependency].keys() - - # TODO: remove this workaround for issue https://github.com/NeurodataWithoutBorders/pynwb/issues/1357 - candidate_namespaces.discard("hdmf-experimental") # remove validation of hdmf-experimental for now - cached_namespaces = sorted(candidate_namespaces) - - if len(cached_namespaces) > 0: - type_map = TypeMap(namespaces=catalog) - manager = BuildManager(type_map=type_map) - else: - manager = None - - return cached_namespaces, manager, namespace_dependencies - - -@docval( - { - "name": "io", - "type": HDMFIO, - "doc": "An open IO to an NWB file.", - "default": None, - }, # For back-compatability - { - "name": "namespace", - "type": str, - "doc": "A specific namespace to validate against.", - "default": None, - }, # Argument order is for back-compatability - { - "name": "paths", - "type": list, - "doc": "List of NWB file paths.", - "default": None, - }, - { - "name": "use_cached_namespaces", - "type": bool, - "doc": "Whether to use namespaces cached within the file for validation.", - "default": True, - }, - { - "name": "verbose", - "type": bool, - "doc": "Whether or not to print messages to stdout.", - "default": False, - }, - { - "name": "driver", - "type": str, - "doc": "Driver for h5py to use when opening the HDF5 file.", - "default": None, - }, - returns="Validation errors in the file.", - rtype=(list, (list, bool)), - is_method=False, -) -def 
validate(**kwargs): - """Validate NWB file(s) against a namespace or its cached namespaces. - - NOTE: If an io object is provided and no namespace name is specified, then the file will be validated - against the core namespace, even if use_cached_namespaces is True. - """ - from . import NWBHDF5IO # TODO: modularize to avoid circular import - - io, paths, use_cached_namespaces, namespace, verbose, driver = getargs( - "io", "paths", "use_cached_namespaces", "namespace", "verbose", "driver", kwargs - ) - assert io != paths, "Both 'io' and 'paths' were specified! Please choose only one." - - if io is not None: - validation_errors = _validate_helper(io=io, namespace=namespace or CORE_NAMESPACE) - return validation_errors - - status = 0 - validation_errors = list() - for path in paths: - namespaces_to_validate = [] - namespace_message = "PyNWB namespace information" - io_kwargs = dict(path=path, mode="r", driver=driver) - - if use_cached_namespaces: - cached_namespaces, manager, namespace_dependencies = get_cached_namespaces_to_validate( - path=path, driver=driver - ) - io_kwargs.update(manager=manager) - - if any(cached_namespaces): - namespaces_to_validate = cached_namespaces - namespace_message = "cached namespace information" - else: - namespaces_to_validate = [CORE_NAMESPACE] - if verbose: - print( - f"The file {path} has no cached namespace information. Falling back to {namespace_message}.", - file=sys.stderr, - ) - else: - io_kwargs.update(load_namespaces=False) - namespaces_to_validate = [CORE_NAMESPACE] - - if namespace is not None: - if namespace in namespaces_to_validate: - namespaces_to_validate = [namespace] - elif use_cached_namespaces and namespace in namespace_dependencies: # validating against a dependency - for namespace_dependency in namespace_dependencies: - if namespace in namespace_dependencies[namespace_dependency]: - status = 1 - print( - f"The namespace '{namespace}' is included by the namespace " - f"'{namespace_dependency}'. 
Please validate against that namespace instead.", - file=sys.stderr, - ) - else: - status = 1 - print( - f"The namespace '{namespace}' could not be found in {namespace_message} as only " - f"{namespaces_to_validate} is present.", - file=sys.stderr, - ) - - if status == 1: - continue - - with NWBHDF5IO(**io_kwargs) as io: - for validation_namespace in namespaces_to_validate: - if verbose: - print(f"Validating {path} against {namespace_message} using namespace '{validation_namespace}'.") - validation_errors += _validate_helper(io=io, namespace=validation_namespace) - return validation_errors, status - - -def validate_cli(): - """CLI wrapper around pynwb.validate.""" - parser = ArgumentParser( - description="Validate an NWB file", - epilog="If --ns is not specified, validate against all namespaces in the NWB file.", - ) - - # Special arg specific to CLI - parser.add_argument( - "-lns", - "--list-namespaces", - dest="list_namespaces", - action="store_true", - help="List the available namespaces and exit.", - ) - - # Common args to the API validate - parser.add_argument("paths", type=str, nargs="+", help="NWB file paths") - parser.add_argument("-n", "--ns", type=str, help="the namespace to validate against") - feature_parser = parser.add_mutually_exclusive_group(required=False) - feature_parser.add_argument( - "--no-cached-namespace", - dest="no_cached_namespace", - action="store_true", - help="Use the PyNWB loaded namespace (true) or use the cached namespace (false; default).", - ) - parser.set_defaults(no_cached_namespace=False) - args = parser.parse_args() - status = 0 - - if args.list_namespaces: - for path in args.paths: - cached_namespaces, _, _ = get_cached_namespaces_to_validate(path=path) - print("\n".join(cached_namespaces)) - else: - validation_errors, validation_status = validate( - paths=args.paths, use_cached_namespaces=not args.no_cached_namespace, namespace=args.ns, verbose=True - ) - if not validation_status: - 
_print_errors(validation_errors=validation_errors) - status = status or validation_status or (validation_errors is not None and len(validation_errors) > 0) - - sys.exit(status) - - -if __name__ == "__main__": # pragma: no cover - validate_cli() diff --git a/src/pynwb/validation.py b/src/pynwb/validation.py new file mode 100644 index 000000000..d2fb5cb17 --- /dev/null +++ b/src/pynwb/validation.py @@ -0,0 +1,198 @@ +"""Module to validate an NWB file against a namespace.""" +from typing import Tuple, List, Dict, Optional +from pathlib import Path +from warnings import warn + +from hdmf.spec import NamespaceCatalog +from hdmf.build import BuildManager, TypeMap +from hdmf.utils import docval, getargs, AllowPositional +from hdmf.backends.io import HDMFIO +from hdmf.validate import ValidatorMap + +from pynwb import CORE_NAMESPACE +from pynwb.spec import NWBDatasetSpec, NWBGroupSpec, NWBNamespace + + +def _validate_helper(io: HDMFIO, namespace: str = CORE_NAMESPACE) -> list: + builder = io.read_builder() + validator = ValidatorMap(io.manager.namespace_catalog.get_namespace(name=namespace)) + return validator.validate(builder) + + +def get_cached_namespaces_to_validate(path: Optional[str] = None, + driver: Optional[str] = None, + aws_region: Optional[str] = None, + io: Optional[HDMFIO] = None +) -> Tuple[List[str], BuildManager, Dict[str, str]]: + """ + Determine the most specific namespace(s) that are cached in the given NWBFile that can be used for validation. + + Example + ------- + The following example illustrates how we can use this function to validate against namespaces + cached in a file. This is useful, e.g., when a file was created using an extension + + .. 
code-block:: python + + from pynwb import validate + from pynwb.validation import get_cached_namespaces_to_validate + path = "my_nwb_file.nwb" + validate_namespaces, manager, cached_namespaces = get_cached_namespaces_to_validate(path) + with NWBHDF5IO(path, "r", manager=manager) as reader: + errors = [] + for ns in validate_namespaces: + errors += validate(io=reader, namespace=ns) + + :param path: Path for the NWB file + :return: Tuple with: + - List of strings with the most specific namespace(s) to use for validation. + - BuildManager object for opening the file for validation + - Dict with the full result from NWBHDF5IO.load_namespaces + """ + + catalog = NamespaceCatalog( + group_spec_cls=NWBGroupSpec, dataset_spec_cls=NWBDatasetSpec, spec_namespace_cls=NWBNamespace + ) + + if io is not None: + # TODO update HDF5IO to have .file property to make consistent with ZarrIO + # then update input arguments here + namespace_dependencies = io.load_namespaces(namespace_catalog=catalog, + file=io._file) + else: + from pynwb import _get_backend + backend_io = _get_backend(path, method=driver) + namespace_dependencies = backend_io.load_namespaces(namespace_catalog=catalog, + path=path, + driver=driver, + aws_region=aws_region) + + # Determine which namespaces are the most specific (i.e. 
extensions) and validate against those + candidate_namespaces = set(namespace_dependencies.keys()) + for namespace_dependency in namespace_dependencies: + candidate_namespaces -= namespace_dependencies[namespace_dependency].keys() + + # TODO: remove this workaround for issue https://github.com/NeurodataWithoutBorders/pynwb/issues/1357 + candidate_namespaces.discard("hdmf-experimental") # remove validation of hdmf-experimental for now + cached_namespaces = sorted(candidate_namespaces) + + if len(cached_namespaces) > 0: + type_map = TypeMap(namespaces=catalog) + manager = BuildManager(type_map=type_map) + else: + manager = None + + return cached_namespaces, manager, namespace_dependencies + +@docval( + { + "name": "io", + "type": HDMFIO, + "doc": "An open IO to an NWB file.", + "default": None, + }, # For back-compatability + { + "name": "namespace", + "type": str, + "doc": "A specific namespace to validate against.", + "default": None, + }, # Argument order is for back-compatability + { + "name": "path", + "type": (str, Path), + "doc": "NWB file path.", + "default": None, + }, + { + "name": "use_cached_namespaces", + "type": bool, + "doc": "Whether to use namespaces cached within the file for validation.", + "default": True, + }, + { + "name": "verbose", + "type": bool, + "doc": "Whether or not to print messages to stdout.", + "default": False, + }, + { + "name": "driver", + "type": str, + "doc": "Driver for h5py to use when opening the HDF5 file.", + "default": None, + }, + returns="Validation errors in the file.", + rtype=list, + is_method=False, + allow_positional=AllowPositional.WARNING, +) +def validate(**kwargs): + """Validate NWB file(s) against a namespace or its cached namespaces. + + Note: this function checks for compliance with the NWB schema. + It is recommended to use the NWBInspector for more comprehensive validation of both + compliance with the schema and compliance of data with NWB best practices. 
+ """ + + io, path, use_cached_namespaces, namespace, verbose, driver = getargs( + "io", "path", "use_cached_namespaces", "namespace", "verbose", "driver", kwargs + ) + assert io != path, "Both 'io' and 'path' were specified! Please choose only one." + path = str(path) if isinstance(path, Path) else path + + # get namespaces to validate + namespace_message = "PyNWB namespace information" + io_kwargs = dict(path=path, mode="r", driver=driver) + + if use_cached_namespaces: + cached_namespaces, manager, namespace_dependencies = get_cached_namespaces_to_validate(path=path, + driver=driver, + io=io) + io_kwargs.update(manager=manager) + + if any(cached_namespaces): + namespaces_to_validate = cached_namespaces + namespace_message = "cached namespace information" + else: + namespaces_to_validate = [CORE_NAMESPACE] + if verbose: + warn(f"The file {f'{path} ' if path is not None else ''}has no cached namespace information. " + f"Falling back to {namespace_message}.", UserWarning) + else: + io_kwargs.update(load_namespaces=False) + namespaces_to_validate = [CORE_NAMESPACE] + + # get io object if not provided + if path is not None: + from pynwb import _get_backend + backend_io = _get_backend(path, method=driver) + io = backend_io(**io_kwargs) + + # check namespaces are accurate + if namespace is not None: + if namespace in namespaces_to_validate: + namespaces_to_validate = [namespace] + elif use_cached_namespaces and namespace in namespace_dependencies: # validating against a dependency + for namespace_dependency in namespace_dependencies: + if namespace in namespace_dependencies[namespace_dependency]: + raise ValueError( + f"The namespace '{namespace}' is included by the namespace " + f"'{namespace_dependency}'. 
Please validate against that namespace instead.") + else: + raise ValueError( + f"The namespace '{namespace}' could not be found in {namespace_message} as only " + f"{namespaces_to_validate} is present.",) + + # validate against namespaces + validation_errors = [] + for validation_namespace in namespaces_to_validate: + if verbose: + print(f"Validating {f'{path} ' if path is not None else ''}against " # noqa: T201 + f"{namespace_message} using namespace '{validation_namespace}'.") + validation_errors += _validate_helper(io=io, namespace=validation_namespace) + + if path is not None: + io.close() # close the io object if it was created within this function, otherwise leave as is + + return validation_errors + diff --git a/src/pynwb/validation_cli.py b/src/pynwb/validation_cli.py new file mode 100644 index 000000000..ad774f25b --- /dev/null +++ b/src/pynwb/validation_cli.py @@ -0,0 +1,84 @@ +"""Command line tool to Validate an NWB file against a namespace.""" +import json +import sys +from argparse import ArgumentParser +from pathlib import Path + +from pynwb.validation import validate, get_cached_namespaces_to_validate + + +def _print_errors(validation_errors: list): + if validation_errors: + print(" - found the following errors:", file=sys.stderr) + for err in validation_errors: + print(str(err), file=sys.stderr) + else: + print(" - no errors found.") + + +def validation_cli(): + """CLI wrapper around pynwb.validate. + + Note: this CLI wrapper checks for compliance with the NWB schema. + It is recommended to use the NWBInspector CLI for more comprehensive validation of both + compliance with the schema and compliance of data with NWB best practices. 
+ """ + parser = ArgumentParser( + description="Validate an NWB file", + epilog="If --ns is not specified, validate against all namespaces in the NWB file.", + ) + + # Special arg specific to CLI + parser.add_argument( + "-lns", + "--list-namespaces", + dest="list_namespaces", + action="store_true", + help="List the available namespaces and exit.", + ) + + # Common args to the API validate + parser.add_argument("paths", type=str, nargs="+", help="NWB file paths") + parser.add_argument("-n", "--ns", type=str, help="the namespace to validate against") + parser.add_argument("--json-output-path", dest="json_output_path", type=str, + help="Write json output to this location.") + feature_parser = parser.add_mutually_exclusive_group(required=False) + feature_parser.add_argument( + "--no-cached-namespace", + dest="no_cached_namespace", + action="store_true", + help="Use the namespaces installed by PyNWB (true) or use the cached namespaces (false; default).", + ) + parser.set_defaults(no_cached_namespace=False) + args = parser.parse_args() + + status = 0 + for path in args.paths: + if args.list_namespaces: + cached_namespaces, _, _ = get_cached_namespaces_to_validate(path=path) + print("\n".join(cached_namespaces)) + else: + validation_errors = [] + try: + val_errors = validate( + path=path, use_cached_namespaces=not args.no_cached_namespace, namespace=args.ns, verbose=True, + ) + _print_errors(validation_errors=val_errors) + status = status or int(val_errors is not None and len(val_errors) > 0) + validation_errors.append(val_errors) + except ValueError as e: + print(e, file=sys.stderr) + status = 1 + + # write output to json file + if args.json_output_path is not None: + with open(args.json_output_path, "w") as f: + json_report = {'exitcode': status, 'errors': [str(e) for e in validation_errors]} + json.dump(obj=json_report, fp=f) + print(f"Report saved to {str(Path(args.json_output_path).absolute())}!") + + sys.exit(status) + + +if __name__ == "__main__": # pragma: no 
cover + validation_cli() diff --git a/test.py b/test.py index de3c2c762..a93f6237d 100644 --- a/test.py +++ b/test.py @@ -156,7 +156,7 @@ def validate_nwbs(): examples_nwbs = [x for x in examples_nwbs if not x.startswith('sub-')] import pynwb - from pynwb.validate import get_cached_namespaces_to_validate + from pynwb.validation import validate, get_cached_namespaces_to_validate for nwb in examples_nwbs: try: @@ -168,7 +168,8 @@ def validate_nwbs(): is_family_nwb_file = False try: with pynwb.NWBHDF5IO(nwb, mode='r') as io: - errors = pynwb.validate(io) + errors = validate(io, use_cached_namespaces=False) + errors.extend(validate(io, use_cached_namespaces=True)) except OSError as e: # if the file was created with the family driver, need to use the family driver to open it if 'family driver should be used' in str(e): @@ -178,7 +179,8 @@ def validate_nwbs(): memb_size = 1024**2 # note: the memb_size must be the same as the one used to create the file with h5py.File(filename_pattern, mode='r', driver='family', memb_size=memb_size) as f: with pynwb.NWBHDF5IO(file=f, manager=None, mode='r') as io: - errors = pynwb.validate(io) + errors = validate(io, use_cached_namespaces=False) + errors.extend(validate(io, use_cached_namespaces=True)) else: raise e @@ -201,14 +203,14 @@ def validate_nwbs(): ERRORS += 1 cmds = [] - cmds += [["python", "-m", "pynwb.validate", nwb]] - cmds += [["python", "-m", "pynwb.validate", "--no-cached-namespace", nwb]] + cmds += [["pynwb-validate", nwb]] + cmds += [["pynwb-validate", "--no-cached-namespace", nwb]] for ns in namespaces: # for some reason, this logging command is necessary to correctly printing the namespace in the # next logging command logging.info("Namespace found: %s" % ns) - cmds += [["python", "-m", "pynwb.validate", "--ns", ns, nwb]] + cmds += [["pynwb-validate", "--ns", ns, nwb]] for cmd in cmds: logging.info("Validating with \"%s\"." 
% (" ".join(cmd[:-1]))) diff --git a/tests/integration/ros3/test_ros3.py b/tests/integration/ros3/test_ros3.py index 2571e6199..347b070e4 100644 --- a/tests/integration/ros3/test_ros3.py +++ b/tests/integration/ros3/test_ros3.py @@ -1,6 +1,6 @@ from pynwb import NWBHDF5IO from pynwb import validate -from pynwb.validate import get_cached_namespaces_to_validate +from pynwb.validation import get_cached_namespaces_to_validate from pynwb.testing import TestCase import urllib.request import h5py @@ -90,10 +90,9 @@ def test_dandi_get_cached_namespaces(self): ) self.assertCountEqual(first=found_namespaces, second=expected_namespaces) - self.assertDictEqual(d1=expected_namespace_dependencies, d2=expected_namespace_dependencies) + self.assertDictEqual(d1=found_namespace_dependencies, d2=expected_namespace_dependencies) def test_dandi_validate(self): - result, status = validate(paths=[self.s3_test_path], driver="ros3") + result = validate(path=self.s3_test_path, driver="ros3") assert result == [] - assert status == 0 diff --git a/tests/integration/utils/test_io_utils.py b/tests/integration/utils/test_io_utils.py index 73732924f..7f36e5474 100644 --- a/tests/integration/utils/test_io_utils.py +++ b/tests/integration/utils/test_io_utils.py @@ -1,10 +1,13 @@ """Tests related to pynwb.io.utils.""" import pytest +from datetime import datetime +from dateutil.tz import tzutc + from hdmf.build import GroupBuilder from pynwb.io.utils import get_nwb_version -from pynwb.testing import TestCase - +from pynwb.testing import TestCase, remove_test_file +from pynwb import NWBFile, NWBHDF5IO, _get_backend class TestGetNWBVersion(TestCase): @@ -53,3 +56,23 @@ def test_get_nwb_version_20b(self): builder1.set_attribute(name="nwb_version", value="2.0b") assert get_nwb_version(builder1) == (2, 0, 0) assert get_nwb_version(builder1, include_prerelease=True) == (2, 0, 0, "b") + +class TestGetNWBBackend(TestCase): + def setUp(self): + self.nwbfile = NWBFile(session_description='a test NWB File', + 
identifier='TEST123', + session_start_time=datetime(1970, 1, 1, 12, tzinfo=tzutc())) + self.hdf5_path = "test_pynwb_nwb_backend.nwb" + with NWBHDF5IO(self.hdf5_path, 'w') as io: + io.write(self.nwbfile) + + def tearDown(self): + remove_test_file(self.hdf5_path) + + def test_get_backend_invalid_file(self): + with self.assertRaises(ValueError): + _get_backend('not_a_file.nwb') + + def test_get_backend_HDF5(self): + backend_io = _get_backend(self.hdf5_path) + self.assertEqual(backend_io, NWBHDF5IO) \ No newline at end of file diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index c2829ee1f..8b2c34888 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -1,5 +1,6 @@ import subprocess import re +import os import sys from unittest.mock import patch from io import StringIO @@ -23,7 +24,7 @@ # combine the individual coverage reports into one .coverage file. def run_coverage(extra_args: list[str]): return subprocess.run( - [sys.executable, "-m", "coverage", "run", "-p", "-m", "pynwb.validate"] + [sys.executable, "-m", "coverage", "run", "-p", "-m", "pynwb.validation_cli"] + extra_args, capture_output=True ) @@ -54,13 +55,15 @@ def test_validate_file_no_cache_bad_ns(self): """Test that validating a file with no cached spec against a specified, unknown namespace fails.""" result = run_coverage(["tests/back_compat/1.0.2_nwbfile.nwb", "--ns", "notfound"]) - stderr_regex = re.compile( + stderr_regex_1 = re.compile( r"The file tests/back_compat/1\.0\.2_nwbfile\.nwb has no cached namespace information\. 
" - r"Falling back to PyNWB namespace information\.\s*" + r"Falling back to PyNWB namespace information\.\s*") + stderr_regex_2 = re.compile( r"The namespace 'notfound' could not be found in PyNWB namespace information as only " - r"\['core'\] is present\.\s*" - ) - self.assertRegex(result.stderr.decode('utf-8'), stderr_regex) + r"\['core'\] is present\.\s*") + + self.assertRegex(result.stderr.decode('utf-8'), stderr_regex_1) + self.assertRegex(result.stderr.decode('utf-8'), stderr_regex_2) self.assertEqual(result.stdout.decode('utf-8'), '') @@ -174,6 +177,22 @@ def test_validate_file_list_namespaces_extension(self): stdout_regex = re.compile(r"ndx-testextension\s*") self.assertRegex(result.stdout.decode('utf-8'), stdout_regex) + def test_validate_file_json_output(self): + """Test that validating a file with the json flag outputs a json file.""" + json_path = "test_validation.json" + run_coverage(["tests/back_compat/1.0.2_str_experimenter.nwb", "--no-cached-namespace", + "--json-output-path", json_path]) + self.assertTrue(os.path.exists(json_path)) + os.remove(json_path) + + def test_validation_entry_point(self): + """Test that using the validation entry point successfully executes the validate CLI.""" + json_path = "test_validation_entry_point.json" + subprocess.run(["pynwb-validate", "tests/back_compat/1.0.2_str_experimenter.nwb", + "--json-output-path", json_path]) + self.assertTrue(os.path.exists(json_path)) + os.remove(json_path) + class TestValidateFunction(TestCase): @@ -199,9 +218,11 @@ def test_validate_io_no_cache(self): def test_validate_io_no_cache_bad_ns(self): """Test that validating a file with no cached spec against a specified, unknown namespace fails.""" - with self.get_io('tests/back_compat/1.0.2_nwbfile.nwb') as io: - with self.assertRaisesWith(KeyError, "\"'notfound' not a namespace\""): - validate(io, 'notfound') + expected_error = ("The namespace 'notfound' could not be found in PyNWB namespace information as only " + "['core'] is present.") 
+ with self.assertRaisesWith(ValueError, expected_error): + with self.get_io('tests/back_compat/1.0.2_nwbfile.nwb') as io: + validate(io=io, namespace='notfound') def test_validate_io_cached(self): """Test that validating a file with cached spec against its cached namespace succeeds.""" @@ -221,75 +242,80 @@ def test_validate_io_cached_extension_pass_ns(self): errors = validate(io, 'ndx-testextension') self.assertEqual(errors, []) - def test_validate_io_cached_core_with_io(self): - """ - For back-compatability, test that validating a file with cached extension spec against the core - namespace succeeds when using the `io` + `namespace` keywords. - """ - with self.get_io(path='tests/back_compat/2.1.0_nwbfile_with_extension.nwb') as io: - results = validate(io=io, namespace="core") - self.assertEqual(results, []) - def test_validate_file_cached_extension(self): """ Test that validating a file with cached extension spec against the core namespace raises an error with the new CLI-mimicing paths keyword. """ nwbfile_path = "tests/back_compat/2.1.0_nwbfile_with_extension.nwb" - with patch("sys.stderr", new=StringIO()) as fake_err: - with patch("sys.stdout", new=StringIO()) as fake_out: - results, status = validate(paths=[nwbfile_path], namespace="core", verbose=True) - self.assertEqual(results, []) - self.assertEqual(status, 1) - self.assertEqual( - fake_err.getvalue(), - ( - "The namespace 'core' is included by the namespace 'ndx-testextension'. " - "Please validate against that namespace instead.\n" - ) - ) - self.assertEqual(fake_out.getvalue(), "") + expected_error = ("The namespace 'core' is included by the namespace 'ndx-testextension'. " + "Please validate against that namespace instead.") + with self.assertRaisesWith(ValueError, expected_error): + validate(path=nwbfile_path, namespace="core", verbose=True) def test_validate_file_cached_core(self): """ Test that validating a file with cached core spec with verbose=False. 
""" nwbfile_path = "tests/back_compat/1.1.2_nwbfile.nwb" - with patch("sys.stderr", new=StringIO()) as fake_err: - with patch("sys.stdout", new=StringIO()) as fake_out: - results, status = validate(paths=[nwbfile_path], namespace="core") - self.assertEqual(results, []) - self.assertEqual(status, 0) - self.assertEqual(fake_err.getvalue(), "") - self.assertEqual(fake_out.getvalue(), "") + results = validate(path=nwbfile_path, namespace="core") + self.assertEqual(results, []) def test_validate_file_cached_no_cache_bad_ns(self): """ Test that validating a file with no cached namespace, a namespace that is not found, and verbose=False. """ nwbfile_path = "tests/back_compat/1.0.2_nwbfile.nwb" - with patch("sys.stderr", new=StringIO()) as fake_err: - with patch("sys.stdout", new=StringIO()) as fake_out: - results, status = validate(paths=[nwbfile_path], namespace="notfound") - self.assertEqual(results, []) - self.assertEqual(status, 1) - stderr_regex = ( - r"The namespace 'notfound' could not be found in PyNWB namespace information as only " - r"\['core'\] is present.\n" - ) - self.assertRegex(fake_err.getvalue(), stderr_regex) - self.assertEqual(fake_out.getvalue(), "") - - def test_validate_io_cached_bad_ns(self): - """Test that validating a file with cached spec against a specified, unknown namespace fails.""" - with self.get_io('tests/back_compat/1.1.2_nwbfile.nwb') as io: - with self.assertRaisesWith(KeyError, "\"'notfound' not a namespace\""): - validate(io, 'notfound') + expected_error = ("The namespace 'notfound' could not be found in PyNWB namespace information as only " + "['core'] is present.") + with self.assertRaisesWith(ValueError, expected_error): + validate(path=nwbfile_path, namespace="notfound") def test_validate_io_cached_hdmf_common(self): """Test that validating a file with cached spec against the hdmf-common namespace fails.""" - with self.get_io('tests/back_compat/1.1.2_nwbfile.nwb') as io: - # TODO this error should not be different from the error 
when using the validate script above - msg = "builder must have data type defined with attribute 'data_type'" - with self.assertRaisesWith(ValueError, msg): - validate(io, 'hdmf-common') + expected_error = ("The namespace 'hdmf-common' is included by the namespace 'core'. " + "Please validate against that namespace instead.") + with self.assertRaisesWith(ValueError, expected_error): + with self.get_io(path='tests/back_compat/1.1.2_nwbfile.nwb') as io: + validate(io=io, namespace="hdmf-common", verbose=True) + + def test_validate_io_and_path_same(self): + """Test that validating a file with an io object and a path return the same results.""" + tests = [('tests/back_compat/1.0.2_nwbfile.nwb', None), + ('tests/back_compat/1.1.2_nwbfile.nwb', None), + ('tests/back_compat/1.1.2_nwbfile.nwb', 'core'), + ('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', None), + ('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', 'ndx-testextension'),] + + tests_with_error = [('tests/back_compat/1.0.2_nwbfile.nwb', 'notfound'), + ('tests/back_compat/1.1.2_nwbfile.nwb', 'notfound'), + ('tests/back_compat/1.1.2_nwbfile.nwb', 'hdmf-common'), + ('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', 'core'),] + + # paths that cause no errors + for path, ns in tests: + with patch("sys.stdout", new=StringIO()) as fake_out: + with self.get_io(path=path) as io: + results_io = validate(io=io, namespace=ns, verbose=True) + out_io = fake_out.getvalue() + + with patch("sys.stdout", new=StringIO()) as fake_out: + results_path = validate(path=path, namespace=ns, verbose=True) + out_path = fake_out.getvalue() + + # remove path from error messages since it will not be included in io outputs + out_path = out_path.replace(f'{path} ', '') + self.assertEqual(results_io, results_path) + self.assertEqual(out_io, out_path) + + # paths that return errors + for path, ns in tests_with_error: + with self.assertRaises(ValueError) as e_io: + with self.get_io(path=path) as io: + results_io = validate(io=io, 
namespace=ns, verbose=True) + + with self.assertRaises(ValueError) as e_path: + results_path = validate(path=path, namespace=ns, verbose=True) + + # remove path from error messages since it will not be included in io outputs + self.assertEqual(str(e_io.exception), str(e_path.exception)) From d7efa5b117abc3b0603380b95283bc99188368ad Mon Sep 17 00:00:00 2001 From: Steph Prince <40640337+stephprince@users.noreply.github.com> Date: Fri, 3 Jan 2025 16:46:31 -0800 Subject: [PATCH 6/8] update dates and requirements --- Legal.txt | 2 +- README.rst | 4 ++-- docs/source/conf.py | 2 +- environment-ros3.yml | 4 ++-- license.txt | 2 +- pyproject.toml | 2 +- requirements-doc.txt | 3 ++- requirements-opt.txt | 10 +++++----- 8 files changed, 15 insertions(+), 14 deletions(-) diff --git a/Legal.txt b/Legal.txt index eb5bcac88..c16773cbb 100644 --- a/Legal.txt +++ b/Legal.txt @@ -1,4 +1,4 @@ -“pynwb” Copyright (c) 2017-2024, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. +“pynwb” Copyright (c) 2017-2025, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. If you have questions about your rights to use or distribute this software, please contact Berkeley Lab's Innovation & Partnerships Office at IPO@lbl.gov. diff --git a/README.rst b/README.rst index 408446cff..aeba219ee 100644 --- a/README.rst +++ b/README.rst @@ -94,7 +94,7 @@ Citing NWB LICENSE ======= -"pynwb" Copyright (c) 2017-2024, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. 
+"pynwb" Copyright (c) 2017-2025, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. @@ -110,7 +110,7 @@ You are under no obligation whatsoever to provide any bug fixes, patches, or upg COPYRIGHT ========= -"pynwb" Copyright (c) 2017-2024, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. +"pynwb" Copyright (c) 2017-2025, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. If you have questions about your rights to use or distribute this software, please contact Berkeley Lab's Innovation & Partnerships Office at IPO@lbl.gov. NOTICE. This Software was developed under funding from the U.S. Department of Energy and the U.S. Government consequently retains certain rights. As such, the U.S. Government has been granted for itself and others acting on its behalf a paid-up, nonexclusive, irrevocable, worldwide license in the Software to reproduce, distribute copies to the public, prepare derivative works, and perform publicly and display publicly, and to permit other to do so. diff --git a/docs/source/conf.py b/docs/source/conf.py index c4966c491..c10aed355 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -189,7 +189,7 @@ def __call__(self, filename): # General information about the project. 
project = u'PyNWB' -copyright = u'2017-2024, Neurodata Without Borders' +copyright = u'2017-2025, Neurodata Without Borders' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/environment-ros3.yml b/environment-ros3.yml index bc7483f1e..8d946726b 100644 --- a/environment-ros3.yml +++ b/environment-ros3.yml @@ -13,9 +13,9 @@ dependencies: - python-dateutil==2.9.0 - setuptools - pytest==7.4.3 # pin to pytest < 8 because of incompatibilities to be addressed - - fsspec==2024.6.0 + - fsspec==2024.12.0 - requests==2.32.3 - - aiohttp==3.11.7 + - aiohttp==3.11.11 - pip - pip: - remfile==0.1.13 diff --git a/license.txt b/license.txt index 07d29b4fe..c6d771edb 100644 --- a/license.txt +++ b/license.txt @@ -1,4 +1,4 @@ -“pynwb” Copyright (c) 2017-2024, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. +“pynwb” Copyright (c) 2017-2025, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/pyproject.toml b/pyproject.toml index 755d3384e..edea11fed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ classifiers = [ dependencies = [ "h5py>=3.2.0", "hdmf>=3.14.5", - "numpy>=1.23.0", + "numpy>=1.24.0", "pandas>=1.2.0", "python-dateutil>=2.8.2", ] diff --git a/requirements-doc.txt b/requirements-doc.txt index 8ff798ff2..c084155f0 100644 --- a/requirements-doc.txt +++ b/requirements-doc.txt @@ -16,4 +16,5 @@ hdmf-zarr>=0.3.0 zarr<3 # limited to zarr<3 until hdmf-zarr resolves issues with zarr 3.0 linkml-runtime==1.7.4; python_version >= "3.10" schemasheets==0.2.1; python_version >= "3.10" -oaklib==0.5.32; python_version >= "3.10" \ No newline at end of file +oaklib==0.5.32; python_version >= "3.10" +pooch \ No newline at end of file diff --git a/requirements-opt.txt b/requirements-opt.txt index fa6a18806..f736c2c92 100644 --- a/requirements-opt.txt +++ b/requirements-opt.txt @@ -1,8 +1,8 @@ -linkml-runtime==1.7.4 -schemasheets==0.2.1 -oaklib==0.5.32 +linkml-runtime==1.8.3 +schemasheets==0.3.1 +oaklib==0.6.18 # for streaming tests -fsspec==2024.10.0 +fsspec==2024.12.0 requests==2.32.3 -aiohttp==3.10.11 +aiohttp==3.11.11 From 90bdae6deb29f7beb917988abc223a6bc72680c5 Mon Sep 17 00:00:00 2001 From: Steph Prince <40640337+stephprince@users.noreply.github.com> Date: Fri, 3 Jan 2025 16:47:32 -0800 Subject: [PATCH 7/8] fix test warnings --- docs/gallery/domain/images.py | 6 ++-- tests/integration/hdf5/test_io.py | 2 +- .../integration/hdf5/test_modular_storage.py | 2 +- tests/validation/test_validate.py | 32 +++++++++++++------ 4 files changed, 28 insertions(+), 14 deletions(-) diff --git a/docs/gallery/domain/images.py b/docs/gallery/domain/images.py index 4e9214784..a6821d11a 100644 --- a/docs/gallery/domain/images.py +++ b/docs/gallery/domain/images.py @@ -296,21 +296,21 @@ # 
:py:class:`~pynwb.image.IndexSeries` that indexes into the # :py:class:`~pynwb.base.Images`. -from scipy import misc +from scipy import datasets from pynwb.base import ImageReferences from pynwb.image import GrayscaleImage, Images, IndexSeries, RGBImage gs_face = GrayscaleImage( name="gs_face", - data=misc.face(gray=True), + data=datasets.face(gray=True), description="Grayscale version of a raccoon.", resolution=35.433071, ) rgb_face = RGBImage( name="rgb_face", - data=misc.face(), + data=datasets.face(), resolution=70.0, description="RGB version of a raccoon.", ) diff --git a/tests/integration/hdf5/test_io.py b/tests/integration/hdf5/test_io.py index 1e6ed0593..ce3b0ab36 100644 --- a/tests/integration/hdf5/test_io.py +++ b/tests/integration/hdf5/test_io.py @@ -296,7 +296,7 @@ def test_append(self): np.testing.assert_equal(nwb.acquisition['timeseries2'].data[:], ts2.data) self.assertIs(nwb.processing['test_proc_mod']['LFP'].electrical_series['test_es'].electrodes, nwb.acquisition['timeseries2'].electrodes) - errors = validate(io) + errors = validate(io=io) self.assertEqual(len(errors), 0, errors) def test_electrode_id_uniqueness(self): diff --git a/tests/integration/hdf5/test_modular_storage.py b/tests/integration/hdf5/test_modular_storage.py index fba5d02db..a6df4e6ad 100644 --- a/tests/integration/hdf5/test_modular_storage.py +++ b/tests/integration/hdf5/test_modular_storage.py @@ -192,7 +192,7 @@ def validate(self): for fn in filenames: if os.path.exists(fn): with NWBHDF5IO(fn, mode='r') as io: - errors = pynwb_validate(io) + errors = pynwb_validate(io=io) if errors: for err in errors: raise Exception(err) diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index 8b2c34888..39be1bbe2 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -213,7 +213,7 @@ def get_io(self, path): def test_validate_io_no_cache(self): """Test that validating a file with no cached spec against the core namespace 
succeeds.""" with self.get_io('tests/back_compat/1.0.2_nwbfile.nwb') as io: - errors = validate(io) + errors = validate(io=io) self.assertEqual(errors, []) def test_validate_io_no_cache_bad_ns(self): @@ -227,19 +227,19 @@ def test_validate_io_no_cache_bad_ns(self): def test_validate_io_cached(self): """Test that validating a file with cached spec against its cached namespace succeeds.""" with self.get_io('tests/back_compat/1.1.2_nwbfile.nwb') as io: - errors = validate(io) + errors = validate(io=io) self.assertEqual(errors, []) def test_validate_io_cached_extension(self): """Test that validating a file with cached spec against its cached namespaces succeeds.""" with self.get_io('tests/back_compat/2.1.0_nwbfile_with_extension.nwb') as io: - errors = validate(io) + errors = validate(io=io) self.assertEqual(errors, []) def test_validate_io_cached_extension_pass_ns(self): """Test that validating a file with cached extension spec against the extension namespace succeeds.""" with self.get_io('tests/back_compat/2.1.0_nwbfile_with_extension.nwb') as io: - errors = validate(io, 'ndx-testextension') + errors = validate(io=io, namespace='ndx-testextension') self.assertEqual(errors, []) def test_validate_file_cached_extension(self): @@ -281,17 +281,17 @@ def test_validate_io_cached_hdmf_common(self): def test_validate_io_and_path_same(self): """Test that validating a file with an io object and a path return the same results.""" - tests = [('tests/back_compat/1.0.2_nwbfile.nwb', None), - ('tests/back_compat/1.1.2_nwbfile.nwb', None), + tests = [('tests/back_compat/1.1.2_nwbfile.nwb', None), ('tests/back_compat/1.1.2_nwbfile.nwb', 'core'), ('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', None), ('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', 'ndx-testextension'),] - tests_with_error = [('tests/back_compat/1.0.2_nwbfile.nwb', 'notfound'), - ('tests/back_compat/1.1.2_nwbfile.nwb', 'notfound'), + tests_with_error = [('tests/back_compat/1.1.2_nwbfile.nwb', 'notfound'), 
('tests/back_compat/1.1.2_nwbfile.nwb', 'hdmf-common'), ('tests/back_compat/2.1.0_nwbfile_with_extension.nwb', 'core'),] + tests_with_warning = [('tests/back_compat/1.0.2_nwbfile.nwb', None),] + # paths that cause no errors for path, ns in tests: with patch("sys.stdout", new=StringIO()) as fake_out: @@ -307,7 +307,21 @@ def test_validate_io_and_path_same(self): out_path = out_path.replace(f'{path} ', '') self.assertEqual(results_io, results_path) self.assertEqual(out_io, out_path) - + + #paths that return warnings + for path, ns in tests_with_warning: + with self.assertWarns(UserWarning) as w_io: + with self.get_io(path=path) as io: + results_io = validate(io=io, namespace=ns, verbose=True) + print(results_io) + + with self.assertWarns(UserWarning) as w_path: + results_path = validate(path=path, namespace=ns, verbose=True) + + # remove path from error messages since it will not be included in io outputs + out_path = str(w_path.warning).replace(f'{path} ', '') + self.assertEqual(str(w_io.warning), out_path) + # paths that return errors for path, ns in tests_with_error: with self.assertRaises(ValueError) as e_io: From 88991ebdf3563da282b4734c206353a081b2d88b Mon Sep 17 00:00:00 2001 From: Steph Prince <40640337+stephprince@users.noreply.github.com> Date: Fri, 3 Jan 2025 16:50:50 -0800 Subject: [PATCH 8/8] fix conflicting requirements --- requirements-doc.txt | 6 +++--- tests/validation/test_validate.py | 1 - 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/requirements-doc.txt b/requirements-doc.txt index c084155f0..31d013007 100644 --- a/requirements-doc.txt +++ b/requirements-doc.txt @@ -14,7 +14,7 @@ hdf5plugin dandi>=0.46.6 hdmf-zarr>=0.3.0 zarr<3 # limited to zarr<3 until hdmf-zarr resolves issues with zarr 3.0 -linkml-runtime==1.7.4; python_version >= "3.10" -schemasheets==0.2.1; python_version >= "3.10" -oaklib==0.5.32; python_version >= "3.10" +linkml-runtime==1.8.3; python_version >= "3.10" +schemasheets==0.3.1; python_version >= "3.10" 
+oaklib==0.6.18; python_version >= "3.10" pooch \ No newline at end of file diff --git a/tests/validation/test_validate.py b/tests/validation/test_validate.py index 39be1bbe2..dc1be008c 100644 --- a/tests/validation/test_validate.py +++ b/tests/validation/test_validate.py @@ -313,7 +313,6 @@ def test_validate_io_and_path_same(self): with self.assertWarns(UserWarning) as w_io: with self.get_io(path=path) as io: results_io = validate(io=io, namespace=ns, verbose=True) - print(results_io) with self.assertWarns(UserWarning) as w_path: results_path = validate(path=path, namespace=ns, verbose=True)