diff --git a/caiman/base/rois.py b/caiman/base/rois.py
index 38b158404..fddd1471f 100644
--- a/caiman/base/rois.py
+++ b/caiman/base/rois.py
@@ -780,7 +780,7 @@ def find_matches(D_s, print_assignment: bool = False) -> tuple[list, list]:
             if print_assignment:
                 logger.debug(f'({row}, {column}) -> {value}')
             total.append(value)
-        logger.debug((f'FOV: {ii}, shape: {DD.shape[0]},{DD.shape[1]} total cost: {np.sum(total)}')
+        logger.debug(f'FOV: {ii}, shape: {DD.shape[0]},{DD.shape[1]} total cost: {np.sum(total)}')
         logger.debug(time.time() - t_start)
         costs.append(total)
     # send back the results in the format we want
diff --git a/caiman/components_evaluation.py b/caiman/components_evaluation.py
index 0f0ed295e..5bf9a47c0 100644
--- a/caiman/components_evaluation.py
+++ b/caiman/components_evaluation.py
@@ -143,7 +143,7 @@ def find_activity_intervals(C, Npeaks: int = 5, tB=-3, tA=10, thres: float = 0.3
     for i in range(K):
         if np.sum(np.abs(np.diff(C[i, :]))) == 0:
             L.append([])
-            logger.debug('empty component at:' + str(i))
+            logger.debug(f'empty component at: {i}')
             continue
         indexes = peakutils.indexes(C[i, :], thres=thres)
         srt_ind = indexes[np.argsort(C[i, indexes])][::-1]
diff --git a/caiman/motion_correction.py b/caiman/motion_correction.py
index 912cd0bea..fb186d1c4 100644
--- a/caiman/motion_correction.py
+++ b/caiman/motion_correction.py
@@ -2823,7 +2823,7 @@ def motion_correct_batch_rigid(fname, max_shifts, dview=None, splits=56, num_spl
         logger.error('The movie contains NaNs. NaNs are not allowed!')
         raise Exception('The movie contains NaNs. NaNs are not allowed!')
     else:
-        logger.debug('Adding to movie ' + str(add_to_movie))
+        logger.debug(f'Adding to movie {add_to_movie}')
 
     save_movie = False
     fname_tot_rig = None
diff --git a/caiman/source_extraction/cnmf/cnmf.py b/caiman/source_extraction/cnmf/cnmf.py
index 4249e8f3d..05ce34a6d 100644
--- a/caiman/source_extraction/cnmf/cnmf.py
+++ b/caiman/source_extraction/cnmf/cnmf.py
@@ -435,9 +435,7 @@ def fit(self, images, indices=(slice(None), slice(None))):
         if self.params.get('patch', 'rf') is None and (is_sliced or 'ndarray' in str(type(images))):
             images = images[tuple(indices)]
             self.dview = None
-            logger.info("Parallel processing in a single patch "
-                        "is not available for loaded in memory or sliced" +
-                        " data.")
+            logger.info("Parallel processing in a single patch is not available for loaded in memory or sliced data.")
 
         T = images.shape[0]
         self.params.set('online', {'init_batch': T})
@@ -465,7 +463,8 @@ def fit(self, images, indices=(slice(None), slice(None))):
             #     'ss': np.ones((3,) * len(self.dims), dtype=np.uint8)
             # })
 
-            logger.info(('Using ' + str(self.params.get('patch', 'n_processes')) + ' processes'))
+            logger.info(f"Using {self.params.get('patch', 'n_processes')} processes")
+            # FIXME The code below is really ugly and it's hard to tell if it's doing the right thing
             if self.params.get('preprocess', 'n_pixels_per_process') is None:
                 avail_memory_per_process = psutil.virtual_memory()[
                     1] / 2.**30 / self.params.get('patch', 'n_processes')
diff --git a/caiman/source_extraction/cnmf/map_reduce.py b/caiman/source_extraction/cnmf/map_reduce.py
index ba6cd0e4f..6a7d17e3d 100644
--- a/caiman/source_extraction/cnmf/map_reduce.py
+++ b/caiman/source_extraction/cnmf/map_reduce.py
@@ -14,6 +14,7 @@
 from caiman.cluster import extract_patch_coordinates
 from caiman.mmapping import load_memmap
+from caiman.source_extraction.cnmf import CNMF
 
 
 def cnmf_patches(args_in):
     """Function that is run for each patches
@@ -67,16 +68,16 @@ def cnmf_patches(args_in):
 
     """
     #FIXME Fix in-function imports
-    from . import cnmf
+    from caiman.source_extraction.cnmf import CNMF
     logger = logging.getLogger("caiman")
 
     file_name, idx_, shapes, params = args_in
     name_log = os.path.basename(
         file_name[:-5]) + '_LOG_ ' + str(idx_[0]) + '_' + str(idx_[-1])
 
-    logger.debug(name_log + 'START')
+    logger.debug(name_log + ' START')
 
-    logger.debug(name_log + 'Read file')
+    logger.debug(name_log + ' Read file')
     Yr, dims, timesteps = load_memmap(file_name)
 
     # slicing array (takes the min and max index in n-dimensional space and
@@ -107,7 +108,7 @@ def cnmf_patches(args_in):
         for group in ('preprocess', 'temporal'):
             opts.set(group, {'p': params.get('patch', 'p_patch')})
 
-        cnm = cnmf.CNMF(n_processes=1, params=opts)
+        cnm = CNMF(n_processes=1, params=opts)
         cnm = cnm.fit(images)
 
         return [idx_, shapes, scipy.sparse.coo_matrix(cnm.estimates.A),
diff --git a/caiman/source_extraction/cnmf/params.py b/caiman/source_extraction/cnmf/params.py
index 2014860df..4d5d74108 100644
--- a/caiman/source_extraction/cnmf/params.py
+++ b/caiman/source_extraction/cnmf/params.py
@@ -956,15 +956,13 @@ def check_consistency(self):
             if len(self.motion[a]) != 3:
                 if self.motion[a][0] == self.motion[a][1]:
                     self.motion[a] = (self.motion[a][0],) * 3
-                    logger.warning("is3D=True, hence setting key " + a +
-                                   " automatically to " + str(self.motion[a]))
+                    logger.warning(f"is3D=True, hence setting key {a} to {self.motion[a]}")
                 else:
-                    raise ValueError(a + ' has to be a tuple of length 3 for volumetric 3D data')
+                    raise ValueError(f'{a} must be a tuple of length 3 for volumetric 3D data')
         for key in ('max_num_added', 'min_num_trial'):
             if (self.online[key] == 0 and self.online['update_num_comps']):
                 self.set('online', {'update_num_comps': False})
-                logger.warning(key + "=0, hence setting key update_num_comps " +
-                               "in group online automatically to False.")
+                logger.warning(f"{key}=0, hence setting key online.update_num_comps to False.")
         # FIXME The authoritative value is stored in the init field. This should later be refactored out
         # into a general section, once we're passing around the CNMFParams object rather than splatting it out
         # from **get_group
diff --git a/caiman/utils/utils.py b/caiman/utils/utils.py
index 599d9b60e..54f028e3d 100644
--- a/caiman/utils/utils.py
+++ b/caiman/utils/utils.py
@@ -477,7 +477,7 @@ def recursively_save_dict_contents_to_group(h5file:h5py.File, path:str, dic:dict
         if key == 'g':
             if item is None:
                 item = 0
-            logger.info(key + ' is an object type')
+            logger.info(f'{key} is an object type')
             try:
                 item = np.array(list(item))
             except:
@@ -522,7 +522,7 @@ def recursively_save_dict_contents_to_group(h5file:h5py.File, path:str, dic:dict
         elif isinstance(item, dict):
             recursively_save_dict_contents_to_group(h5file, path + key + '/', item)
         elif 'sparse' in str(type(item)):
-            logger.info(key + ' is sparse ****')
+            logger.info(f'{key} is sparse ****')
             h5file[path + key + '/data'] = item.tocsc().data
             h5file[path + key + '/indptr'] = item.tocsc().indptr
             h5file[path + key + '/indices'] = item.tocsc().indices
@@ -576,10 +576,10 @@ def recursively_load_dict_contents_from_group(h5file:h5py.File, path:str) -> dic
 
         elif isinstance(item, h5py._hl.group.Group):
             if key in ('A', 'W', 'Ab', 'downscale_matrix', 'upscale_matrix'):
-                data = item[path + key + '/data']
+                data    = item[path + key + '/data']
                 indices = item[path + key + '/indices']
-                indptr = item[path + key + '/indptr']
-                shape = item[path + key + '/shape']
+                indptr  = item[path + key + '/indptr']
+                shape   = item[path + key + '/shape']
                 ans[key] = scipy.sparse.csc_matrix((data[:], indices[:], indptr[:]),
                                                    shape[:])
                 if key in ('W', 'upscale_matrix'):
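
Reviewer note on the caiman/utils/utils.py hunks: the realigned block rebuilds a scipy CSC matrix from the four datasets that the save path writes (the load hunk reads them back from '<key>/data', '/indices', '/indptr', '/shape'). A minimal round-trip sketch of that representation, independent of HDF5; the variable names below are illustrative, not caiman's:

    import scipy.sparse

    # A CSC matrix is fully determined by (data, indices, indptr) plus its
    # shape, which is exactly the set of arrays the save/load pair above
    # serializes per key.
    A = scipy.sparse.random(4, 3, density=0.5, format='csc')
    parts = (A.data, A.indices, A.indptr)
    A_roundtrip = scipy.sparse.csc_matrix(parts, shape=A.shape)
    assert (A != A_roundtrip).nnz == 0   # identical contents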
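
Reviewer note on the caiman/source_extraction/cnmf/map_reduce.py hunks: the function-local `from . import cnmf` being replaced was most likely a workaround for a circular dependency, since the `caiman.source_extraction.cnmf` package appears to re-export CNMF from cnmf.py, which in turn imports map_reduce.py. If that chain holds, the new module-level `from caiman.source_extraction.cnmf import CNMF` executes while the package is still partially initialized and raises ImportError at load time. The sketch below reproduces the failure mode with two hypothetical stand-in modules (pkg_a.py for cnmf.py, pkg_b.py for map_reduce.py) and shows the deferred-import pattern that the function-level import relies on:

    # pkg_a.py -- stands in for cnmf.py: it pulls in pkg_b while it is
    # itself still being imported.
    import pkg_b

    class CNMF:
        """Bound on pkg_a only after pkg_b has finished importing."""


    # pkg_b.py -- stands in for map_reduce.py.
    # A module-level `from pkg_a import CNMF` here fails whenever pkg_a is
    # imported first: pkg_a is only partially initialized when it triggers
    # this module, so the name CNMF is not bound yet and Python raises
    # ImportError ("cannot import name 'CNMF' from partially initialized
    # module").

    def build_model():
        # Deferred import: resolved on first call, after both modules have
        # finished executing, so the cycle never materializes.
        from pkg_a import CNMF
        return CNMF()

Keeping the import inside cnmf_patches (as the second hunk does) is the load-order-safe variant; the added top-level import duplicates it, and is the line to drop if the cycle shows up in practice.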