diff --git a/hashstash/config.py b/hashstash/config.py
index f176cc0..0d45d32 100644
--- a/hashstash/config.py
+++ b/hashstash/config.py
@@ -130,11 +130,11 @@ def get_engine(engine):
         engine = OPTIMAL_ENGINE_TYPE
     if engine not in get_working_engines():
         if engine in ENGINES:
-            log.warning(
+            log.debug(
                 f"Engine {engine} is not installed. Defaulting to {DEFAULT_ENGINE_TYPE}. To install {engine}, run: pip install {engine}"
             )
         else:
-            log.warning(
+            log.debug(
                 f'Engine {engine} is not recognized. Defaulting to {DEFAULT_ENGINE_TYPE}. Choose one of: {", ".join(ENGINES)}'
             )
         engine = DEFAULT_ENGINE_TYPE
@@ -273,9 +273,9 @@ def get_compresser(compress):
         compress = OPTIMAL_COMPRESS
     if not compress in get_working_compressers():
         if compress in COMPRESSERS:
-            log.warning(f'Compression library {compress} is not installed. Defaulting to zlib. To install {compress}, run: pip install {compress}')
+            log.debug(f'Compression library {compress} is not installed. Defaulting to zlib. To install {compress}, run: pip install {compress}')
         else:
-            log.warning(f'Compression library {compress} is not recognized. Defaulting to zlib. Choose one of: {", ".join(COMPRESSERS)}')
+            log.debug(f'Compression library {compress} is not recognized. Defaulting to zlib. Choose one of: {", ".join(COMPRESSERS)}')
         compress = DEFAULT_COMPRESS
     return compress

diff --git a/hashstash/engines/dataframe.py b/hashstash/engines/dataframe.py
index 49aa2e5..09049c2 100644
--- a/hashstash/engines/dataframe.py
+++ b/hashstash/engines/dataframe.py
@@ -120,7 +120,7 @@ def _decode_value_from_filepath(self, filepath):
                 return super().decode_value_from_filepath(filepath)
             return MetaDataFrame.read(filepath, df_engine=self.df_engine, compression=self.compress)
         except Exception as e:
-            log.warning(f'error reading dataframe from {filepath}: {e}')
+            log.debug(f'error reading dataframe from {filepath}: {e}')
             return None

     @log.debug
@@ -136,7 +136,7 @@ def items(
                 **kwargs,
             )
             if vals is None:
-                log.warning(f'empty values returned for {key}')
+                log.debug(f'empty values returned for {key}')
             else:
                 if as_dataframe:
                     yield key, vals
diff --git a/hashstash/serializers/custom.py b/hashstash/serializers/custom.py
index 1e11bf9..d0c46c3 100644
--- a/hashstash/serializers/custom.py
+++ b/hashstash/serializers/custom.py
@@ -88,7 +88,7 @@ def _serialize_custom(obj: Any, data:Any=None) -> Any:
     if hasattr(obj, '__reduce__'):
         return ReducerSerializer.serialize(obj)

-    log.warning(f"Unsupported object type: {type(obj)}")
+    log.debug(f"Unsupported object type: {type(obj)}")
     return obj

@@ -343,7 +343,7 @@ def serialize(obj):
                     result['__state_setter__'] = get_obj_addr(reduced[5])
             return result
         except Exception as e:
-            log.warning(f"Error using __reduce__ for {type(obj)}: {e}")
+            log.debug(f"Error using __reduce__ for {type(obj)}: {e}")
             return None

     @staticmethod
@@ -374,7 +374,7 @@ def deserialize(data):
             return obj
         except Exception as e:
-            log.warning(f"Error using safe_unreduce: {e}")
+            log.debug(f"Error using safe_unreduce: {e}")
             return None


 class BytesSerializer(CustomSerializer):
@@ -763,7 +763,7 @@ def get_function_closure(func):
             else:
                 closure_dict[name] = _serialize_custom(cell.cell_contents)
         except ValueError:
-            log.warning(f"Empty cell encountered for {name} in function {obj.__name__}")
+            log.debug(f"Empty cell encountered for {name} in function {obj.__name__}")
             closure_dict[name] = None

     return closure_dict if closure_dict else None
diff --git a/hashstash/serializers/serializer.py b/hashstash/serializers/serializer.py
index 6a5376b..b77a8d5 100644
--- a/hashstash/serializers/serializer.py
+++ b/hashstash/serializers/serializer.py
@@ -52,7 +52,7 @@ def deserialize(data, serializer: SERIALIZER_TYPES = None):
             log.trace(f"Deserialized with {deserializer_func.__name__}")
             return odata
         except Exception as e:
-            log.warning(f"Deserialization failed with {deserializer_func.__name__}: {str(e)}")
+            log.debug(f"Deserialization failed with {deserializer_func.__name__}: {str(e)}")
             raise e


diff --git a/hashstash/utils/addrs.py b/hashstash/utils/addrs.py
index d452c52..297cd3b 100644
--- a/hashstash/utils/addrs.py
+++ b/hashstash/utils/addrs.py
@@ -105,7 +105,7 @@ def flexible_import(obj_or_path):
                 current += '.'
         except ImportError:
             if i == 0:
-                log.warning(f"Could not import module {current}")
+                log.debug(f"Could not import module {current}")
                 return None
             try:
                 obj = getattr(obj, part)
@@ -144,7 +144,7 @@ def get_class_src(cls):
                 lines.extend(func_lines)
                 lines.append("") # Add an empty line after each method
             except OSError:
-                log.warning(f"Could not get source for method {name}")
+                log.debug(f"Could not get source for method {name}")

     src = "\n".join(lines)
     out = reformat_python_source(src)
diff --git a/hashstash/utils/dataframes.py b/hashstash/utils/dataframes.py
index aa6dab6..2599d3e 100644
--- a/hashstash/utils/dataframes.py
+++ b/hashstash/utils/dataframes.py
@@ -523,7 +523,7 @@ def set_index(
     except_columns={"_value"},
 ):
     if get_dataframe_engine(df) != "pandas": # must be pandas
-        log.warning("can only set index on pandas df")
+        log.debug("can only set index on pandas df")
         return df

     assert index_columns or prefix_columns
diff --git a/hashstash/utils/misc.py b/hashstash/utils/misc.py
index d73fbfc..b2b82d4 100644
--- a/hashstash/utils/misc.py
+++ b/hashstash/utils/misc.py
@@ -76,7 +76,7 @@ def rmtreefn(dir_path):
             os.remove(dir_path)
             # log.info(f'Deleted temporary file: {dir_path}')
         else:
-            log.warning(f"Temporary path does not exist: {dir_path}")
+            log.debug(f"Temporary path does not exist: {dir_path}")
     except Exception as e:
         log.debug(f"Failed to delete temporary path {dir_path}: {e}")