diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml
index 2904dacc1..158d251ad 100644
--- a/.github/workflows/run_tests.yml
+++ b/.github/workflows/run_tests.yml
@@ -19,15 +19,16 @@ jobs:
       fail-fast: false
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Set up Conda
-        uses: conda-incubator/setup-miniconda@v2
+        uses: conda-incubator/setup-miniconda@v3
         with:
           auto-update-conda: true
           python-version: ${{ matrix.python-version }}
           environment-file: environment-minimal.yml
           activate-environment: caiman
+          conda-solver: libmamba
 
       - name: Install OS Dependencies
         shell: bash -l {0}
diff --git a/VERSION b/VERSION
index 1cac385c6..720c7384c 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.11.0
+1.11.1
diff --git a/caiman/caimanmanager.py b/caiman/caimanmanager.py
index c2d57b8c0..62580930c 100755
--- a/caiman/caimanmanager.py
+++ b/caiman/caimanmanager.py
@@ -3,6 +3,7 @@
 import argparse
 import filecmp
 import glob
+import json
 import os
 import platform
 import psutil
@@ -52,6 +53,20 @@
 
 def do_install_to(targdir: str, inplace: bool = False, force: bool = False) -> None:
     global sourcedir_base
+    cwd = None # Assigning so it exists to avoid UnboundLocalError
+
+    try:
+        import importlib
+        import importlib_metadata
+        # A lot can change upstream with this code; I hope the APIs are stable, but just in case, make this best-effort
+        if json.loads(importlib_metadata.Distribution.from_name('caiman').read_text('direct_url.json'))['dir_info']['editable']:
+            inplace = True
+            cwd = os.getcwd()
+            os.chdir(str(importlib.resources.files('caiman').joinpath('..')))
+            print(f"Used editable fallback, entered {os.getcwd()} directory")
+    except:
+        print("Did not use editable fallback")
+
     ignore_pycache=shutil.ignore_patterns('__pycache__')
     if os.path.isdir(targdir) and not force:
         raise Exception(targdir + " already exists. You may move it out of the way, remove it, or use --force")
@@ -77,6 +92,8 @@ def do_install_to(targdir: str, inplace: bool = False, force: bool = False) -> N
     with open(os.path.join(targdir, 'RELEASE'), 'w') as verfile_fh:
         print(f"Version:{caiman.__version__}", file=verfile_fh)
     print("Installed " + targdir)
+    if cwd is not None:
+        os.chdir(cwd)
 
 
 def do_check_install(targdir: str, inplace: bool = False) -> None:
diff --git a/caiman/paths.py b/caiman/paths.py
index 05abfb336..91dc1d0bf 100644
--- a/caiman/paths.py
+++ b/caiman/paths.py
@@ -38,7 +38,7 @@ def get_tempdir() -> str:
         if os.path.isdir(os.environ['CAIMAN_TEMP']):
             return os.environ['CAIMAN_TEMP']
         else:
-            logging.warning(f"CAIMAN_TEMP is set to nonexistent directory {os.environment['CAIMAN_TEMP']}. Ignoring")
+            logging.warning(f"CAIMAN_TEMP is set to nonexistent directory {os.environ['CAIMAN_TEMP']}. Ignoring")
     temp_under_data = os.path.join(caiman_datadir(), "temp")
     if not os.path.isdir(temp_under_data):
         logging.warning(f"Default temporary dir {temp_under_data} does not exist, creating")
diff --git a/caiman/source_extraction/cnmf/spatial.py b/caiman/source_extraction/cnmf/spatial.py
index f8b1a833d..045027c8a 100644
--- a/caiman/source_extraction/cnmf/spatial.py
+++ b/caiman/source_extraction/cnmf/spatial.py
@@ -504,7 +504,7 @@ def threshold_components(A, dims, medw=None, thr_method='max', maxthr=0.1, nrgth
             res = dview.map_async(
                 threshold_components_parallel, pars).get(4294967)
         else:
-            res = dview.map_async(threshold_components_parallel, pars)
+            res = dview.map_sync(threshold_components_parallel, pars)
     else:
         res = list(map(threshold_components_parallel, pars))
diff --git a/caiman/utils/utils.py b/caiman/utils/utils.py
index caf82d906..e0dd46f15 100644
--- a/caiman/utils/utils.py
+++ b/caiman/utils/utils.py
@@ -561,25 +561,16 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> dic
     for key, item in h5file[path].items():
         if isinstance(item, h5py._hl.dataset.Dataset):
-            val_set = np.nan
-            if isinstance(item[()], str):
-                if item[()] == 'NoneType':
-                    ans[key] = None
-                else:
-                    ans[key] = item[()]
-
-            elif key in ['dims', 'medw', 'sigma_smooth_snmf', 'dxy', 'max_shifts', 'strides', 'overlaps']:
-                if isinstance(item[()], np.ndarray):
-                    ans[key] = tuple(item[()])
-                else:
-                    ans[key] = item[()]
+            val = item[()]
+            if isinstance(val, str) and val == 'NoneType' or isinstance(val, bytes) and val == b'NoneType':
+                ans[key] = None
+            elif key in ['dims', 'medw', 'sigma_smooth_snmf',
+                         'dxy', 'max_shifts', 'strides', 'overlaps'] and isinstance(val, np.ndarray):
+                ans[key] = tuple(val)
+            elif isinstance(val, np.bool_): # sigh
+                ans[key] = bool(val)
             else:
-                if isinstance(item[()], np.bool_): # sigh
-                    ans[key] = bool(item[()])
-                else:
-                    ans[key] = item[()]
-                if isinstance(ans[key], bytes) and ans[key] == b'NoneType':
-                    ans[key] = None
+                ans[key] = item[()]
 
         elif isinstance(item, h5py._hl.group.Group):
             if key in ('A', 'W', 'Ab', 'downscale_matrix', 'upscale_matrix'):
diff --git a/environment-minimal.yml b/environment-minimal.yml
index 6ff26de47..a7d097ed0 100644
--- a/environment-minimal.yml
+++ b/environment-minimal.yml
@@ -23,7 +23,7 @@ dependencies:
 - scikit-image >=0.19.0
 - scikit-learn >=1.2
 - scipy >= 1.10.1
-- tensorflow >=2.4.0
+- tensorflow >=2.4.0,<2.16
 - tifffile
 - tqdm
 - zarr
diff --git a/environment.yml b/environment.yml
index 6ae3e04e5..6534f39a5 100644
--- a/environment.yml
+++ b/environment.yml
@@ -30,7 +30,7 @@ dependencies:
 - scikit-image >=0.19.0
 - scikit-learn >=1.2
 - scipy >= 1.10.1
-- tensorflow >=2.4.0
+- tensorflow >=2.4.0,<2.16
 - tifffile
 - tk
 - tqdm