diff --git a/CHANGELOG.md b/CHANGELOG.md
index dba230f1..8912cf37 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,7 @@
 ### 0.14.3 -- TBD
 - Fixed - Added encapsulating double quotes to comply with [DOT language](https://graphviz.org/doc/info/lang.html) - PR [#1177](https://github.com/datajoint/datajoint-python/pull/1177)
 - Added - Datajoint python CLI ([#940](https://github.com/datajoint/datajoint-python/issues/940)) PR [#1095](https://github.com/datajoint/datajoint-python/pull/1095)
+- Added - Ability to set hidden attributes on a table - PR [#1091](https://github.com/datajoint/datajoint-python/pull/1091)
 
 ### 0.14.2 -- Aug 19, 2024
 - Added - Migrate nosetests to pytest - PR [#1142](https://github.com/datajoint/datajoint-python/pull/1142)
diff --git a/datajoint/autopopulate.py b/datajoint/autopopulate.py
index 4f2a5f22..0e16ee29 100644
--- a/datajoint/autopopulate.py
+++ b/datajoint/autopopulate.py
@@ -23,7 +23,7 @@
 def _initialize_populate(table, jobs, populate_kwargs):
     """
-    Initialize the process for mulitprocessing.
+    Initialize the process for multiprocessing.
     Saves the unpickled copy of the table to the current process and reconnects.
     """
     process = mp.current_process()
@@ -153,6 +153,7 @@ def _jobs_to_do(self, restrictions):
     def populate(
         self,
         *restrictions,
+        keys=None,
         suppress_errors=False,
         return_exception_objects=False,
         reserve_jobs=False,
@@ -169,6 +170,8 @@
 
         :param restrictions: a list of restrictions each restrict
             (table.key_source - target.proj())
+        :param keys: The list of keys (dicts) to send to self.make().
+            If None (default), then use self.key_source to query the keys.
         :param suppress_errors: if True, do not terminate execution.
         :param return_exception_objects: return error objects instead of just error messages
         :param reserve_jobs: if True, reserve jobs to populate in asynchronous fashion
@@ -206,7 +209,10 @@ def handler(signum, frame):
 
             old_handler = signal.signal(signal.SIGTERM, handler)
 
-        keys = (self._jobs_to_do(restrictions) - self.target).fetch("KEY", limit=limit)
+        if keys is None:
+            keys = (self._jobs_to_do(restrictions) - self.target).fetch(
+                "KEY", limit=limit
+            )
 
         # exclude "error", "ignore" or "reserved" jobs
         if reserve_jobs:
@@ -295,6 +301,7 @@ def _populate1(
         :return: (key, error) when suppress_errors=True,
             True if successfully invoke one `make()` call, otherwise False
         """
+        # use the legacy `_make_tuples` callback.
         make = self._make_tuples if hasattr(self, "_make_tuples") else self.make
 
         if jobs is not None and not jobs.reserve(
diff --git a/datajoint/declare.py b/datajoint/declare.py
index 2e17c798..9a1706a0 100644
--- a/datajoint/declare.py
+++ b/datajoint/declare.py
@@ -6,6 +6,7 @@
 import re
 import pyparsing as pp
 import logging
+from hashlib import sha1
 from .errors import DataJointError, _support_filepath_types, FILEPATH_FEATURE_SWITCH
 from .attribute_adapter import get_adapter
 from .condition import translate_attribute
@@ -310,6 +311,18 @@ def declare(full_table_name, definition, context):
         external_stores,
     ) = prepare_declare(definition, context)
 
+    metadata_attr_sql = [
+        "`_{full_table_name}_timestamp` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP"
+    ]
+    attribute_sql.extend(
+        attr.format(
+            full_table_name=sha1(
+                full_table_name.replace("`", "").encode("utf-8")
+            ).hexdigest()
+        )
+        for attr in metadata_attr_sql
+    )
+
     if not primary_key:
         raise DataJointError("Table must have a primary key")
diff --git a/datajoint/heading.py b/datajoint/heading.py
index c028b20c..f765f835 100644
--- a/datajoint/heading.py
+++ b/datajoint/heading.py
@@ -33,6 +33,7 @@
     is_attachment=False,
     is_filepath=False,
     is_external=False,
+    is_hidden=False,
     adapter=None,
     store=None,
     unsupported=False,
@@ -120,7 +121,7 @@ def table_status(self):
     def attributes(self):
         if self._attributes is None:
             self._init_from_database()  # lazy loading from database
-        return self._attributes
+        return {k: v for k, v in self._attributes.items() if not v.is_hidden}
 
     @property
     def names(self):
@@ -300,6 +301,7 @@ def _init_from_database(self):
                 store=None,
                 is_external=False,
                 attribute_expression=None,
+                is_hidden=attr["name"].startswith("_"),
             )
 
             if any(TYPE_PATTERN[t].match(attr["type"]) for t in ("INTEGER", "FLOAT")):
diff --git a/datajoint/table.py b/datajoint/table.py
index 96e38082..1ad4177a 100644
--- a/datajoint/table.py
+++ b/datajoint/table.py
@@ -644,6 +644,8 @@ def cascade(table):
                 logger.warn("Nothing to delete.")
             if transaction:
                 self.connection.cancel_transaction()
+        elif not transaction:
+            logger.info("Delete completed")
         else:
             if not safemode or user_choice("Commit deletes?", default="no") == "yes":
                 if transaction:
diff --git a/tests/test_blob_matlab.py b/tests/test_blob_matlab.py
index 8e467cf0..17a6ac65 100644
--- a/tests/test_blob_matlab.py
+++ b/tests/test_blob_matlab.py
@@ -34,7 +34,7 @@ def insert_blobs(schema):
     schema.connection.query(
         """
-        INSERT INTO {table_name} VALUES
+        INSERT INTO {table_name} (`id`, `comment`, `blob`) VALUES
         (1,'simple string',0x6D596D00410200000000000000010000000000000010000000000000000400000000000000630068006100720061006300740065007200200073007400720069006E006700),
         (2,'1D vector',0x6D596D0041020000000000000001000000000000000C000000000000000600000000000000000000000000F03F00000000000030400000000000003F4000000000000047400000000000804E4000000000000053400000000000C056400000000000805A400000000000405E4000000000000061400000000000E062400000000000C06440),
         (3,'string array',0x6D596D00430200000000000000010000000000000002000000000000002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E00670031002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E0067003200),
diff --git a/tests/test_declare.py b/tests/test_declare.py
index 8939000b..50845eac 100644
--- a/tests/test_declare.py
+++ b/tests/test_declare.py
@@ -360,3 +360,11 @@ class Table_With_Underscores(dj.Manual):
         dj.DataJointError, match="must be alphanumeric in CamelCase"
CamelCase" ) as e: schema_any(Table_With_Underscores) + + +def test_hidden_attributes(schema_any): + assert ( + list(Experiment().heading._attributes.keys())[-1].split("_")[2] == "timestamp" + ) + assert any(a.is_hidden for a in Experiment().heading._attributes.values()) + assert not any(a.is_hidden for a in Experiment().heading.attributes.values()) diff --git a/tests_old/test_blob_matlab.py b/tests_old/test_blob_matlab.py index 6104c929..a2fa67fd 100644 --- a/tests_old/test_blob_matlab.py +++ b/tests_old/test_blob_matlab.py @@ -40,7 +40,7 @@ def insert_blobs(): schema.connection.query( """ - INSERT INTO {table_name} VALUES + INSERT INTO {table_name} (`id`, `comment`, `blob`) VALUES (1,'simple string',0x6D596D00410200000000000000010000000000000010000000000000000400000000000000630068006100720061006300740065007200200073007400720069006E006700), (2,'1D vector',0x6D596D0041020000000000000001000000000000000C000000000000000600000000000000000000000000F03F00000000000030400000000000003F4000000000000047400000000000804E4000000000000053400000000000C056400000000000805A400000000000405E4000000000000061400000000000E062400000000000C06440), (3,'string array',0x6D596D00430200000000000000010000000000000002000000000000002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E00670031002F0000000000000041020000000000000001000000000000000700000000000000040000000000000073007400720069006E0067003200), diff --git a/tests_old/test_declare.py b/tests_old/test_declare.py index 67f53244..a09cb009 100644 --- a/tests_old/test_declare.py +++ b/tests_old/test_declare.py @@ -341,3 +341,12 @@ class WithSuchALongPartNameThatItCrashesMySQL(dj.Part): definition = """ -> (master) """ + + @staticmethod + def test_hidden_attributes(): + assert ( + list(Experiment().heading._attributes.keys())[-1].split("_")[2] + == "timestamp" + ) + assert any(a.is_hidden for a in Experiment().heading._attributes.values()) + assert not any(a.is_hidden for a in Experiment().heading.attributes.values())