
Commit

eh
quadrismegistus committed Sep 24, 2024
1 parent 931bebb commit 1d7be0d
Showing 7 changed files with 15 additions and 15 deletions.
8 changes: 4 additions & 4 deletions hashstash/config.py
@@ -130,11 +130,11 @@ def get_engine(engine):
        engine = OPTIMAL_ENGINE_TYPE
    if engine not in get_working_engines():
        if engine in ENGINES:
-           log.warning(
+           log.debug(
                f"Engine {engine} is not installed. Defaulting to {DEFAULT_ENGINE_TYPE}. To install {engine}, run: pip install {engine}"
            )
        else:
-           log.warning(
+           log.debug(
                f'Engine {engine} is not recognized. Defaulting to {DEFAULT_ENGINE_TYPE}. Choose one of: {", ".join(ENGINES)}'
            )
        engine = DEFAULT_ENGINE_TYPE
@@ -273,9 +273,9 @@ def get_compresser(compress):
        compress = OPTIMAL_COMPRESS
    if not compress in get_working_compressers():
        if compress in COMPRESSERS:
-           log.warning(f'Compression library {compress} is not installed. Defaulting to zlib. To install {compress}, run: pip install {compress}')
+           log.debug(f'Compression library {compress} is not installed. Defaulting to zlib. To install {compress}, run: pip install {compress}')
        else:
-           log.warning(f'Compression library {compress} is not recognized. Defaulting to zlib. Choose one of: {", ".join(COMPRESSERS)}')
+           log.debug(f'Compression library {compress} is not recognized. Defaulting to zlib. Choose one of: {", ".join(COMPRESSERS)}')
        compress = DEFAULT_COMPRESS
    return compress
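
The get_engine and get_compresser hunks above share one fallback shape: if the requested backend is known but not importable, log it and fall back to the default. A minimal sketch of that shape, with illustrative engine names and a stubbed get_working_engines (the real constants live in hashstash/config.py and differ from these assumptions):

    import logging

    log = logging.getLogger("hashstash")

    # Illustrative values only; the real ENGINES tuple and default are defined in config.py.
    ENGINES = ("pairtree", "sqlite", "redis")
    DEFAULT_ENGINE_TYPE = "pairtree"

    def get_working_engines():
        # The library probes which backends actually import cleanly;
        # here we pretend only the default backend is available.
        return {"pairtree"}

    def get_engine(engine):
        if engine not in get_working_engines():
            if engine in ENGINES:
                log.debug(f"Engine {engine} is not installed. Defaulting to {DEFAULT_ENGINE_TYPE}.")
            else:
                log.debug(f"Engine {engine} is not recognized. Choose one of: {', '.join(ENGINES)}")
            engine = DEFAULT_ENGINE_TYPE
        return engine

    assert get_engine("redis") == "pairtree"      # known backend, "not installed"
    assert get_engine("nonsense") == "pairtree"   # unrecognized name

After this commit the fallback is silent at the default log level; it only surfaces when debug logging is enabled.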
4 changes: 2 additions & 2 deletions hashstash/engines/dataframe.py
@@ -120,7 +120,7 @@ def _decode_value_from_filepath(self, filepath):
                return super().decode_value_from_filepath(filepath)
            return MetaDataFrame.read(filepath, df_engine=self.df_engine, compression=self.compress)
        except Exception as e:
-           log.warning(f'error reading dataframe from {filepath}: {e}')
+           log.debug(f'error reading dataframe from {filepath}: {e}')
            return None

    @log.debug
@@ -136,7 +136,7 @@ def items(
            **kwargs,
        )
        if vals is None:
-           log.warning(f'empty values returned for {key}')
+           log.debug(f'empty values returned for {key}')
        else:
            if as_dataframe:
                yield key, vals
8 changes: 4 additions & 4 deletions hashstash/serializers/custom.py
@@ -88,7 +88,7 @@ def _serialize_custom(obj: Any, data:Any=None) -> Any:
    if hasattr(obj, '__reduce__'):
        return ReducerSerializer.serialize(obj)

-   log.warning(f"Unsupported object type: {type(obj)}")
+   log.debug(f"Unsupported object type: {type(obj)}")
    return obj

@@ -343,7 +343,7 @@ def serialize(obj):
                result['__state_setter__'] = get_obj_addr(reduced[5])
            return result
        except Exception as e:
-           log.warning(f"Error using __reduce__ for {type(obj)}: {e}")
+           log.debug(f"Error using __reduce__ for {type(obj)}: {e}")
            return None

    @staticmethod
@@ -374,7 +374,7 @@ def deserialize(data):

            return obj
        except Exception as e:
-           log.warning(f"Error using safe_unreduce: {e}")
+           log.debug(f"Error using safe_unreduce: {e}")
            return None

class BytesSerializer(CustomSerializer):
@@ -763,7 +763,7 @@ def get_function_closure(func):
            else:
                closure_dict[name] = _serialize_custom(cell.cell_contents)
        except ValueError:
-           log.warning(f"Empty cell encountered for {name} in function {obj.__name__}")
+           log.debug(f"Empty cell encountered for {name} in function {obj.__name__}")
            closure_dict[name] = None
    return closure_dict if closure_dict else None
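
The ReducerSerializer hunks above lean on Python's __reduce__ protocol: an object reports a constructor, its arguments, and optional state, which can be stored and later replayed. A rough sketch of that round trip, using simplified helpers rather than hashstash's actual serialize/deserialize (which also records callable addresses via get_obj_addr and guards the replay more carefully):

    import logging

    log = logging.getLogger("hashstash")

    def reduce_serialize(obj):
        """Capture an object's __reduce__ tuple as a plain dict."""
        try:
            reduced = obj.__reduce__()
            out = {"constructor": reduced[0], "args": reduced[1]}
            if len(reduced) > 2 and reduced[2] is not None:
                out["state"] = reduced[2]
            return out
        except Exception as e:
            log.debug(f"Error using __reduce__ for {type(obj)}: {e}")
            return None

    def reduce_deserialize(data):
        """Rebuild an object by replaying a captured __reduce__ tuple."""
        try:
            obj = data["constructor"](*data["args"])
            if "state" in data:
                if hasattr(obj, "__setstate__"):
                    obj.__setstate__(data["state"])
                else:
                    obj.__dict__.update(data["state"])
            return obj
        except Exception as e:
            log.debug(f"Error replaying reduced data: {e}")
            return None

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y
        def __reduce__(self):
            return (Point, (self.x, self.y))

    p = reduce_deserialize(reduce_serialize(Point(1, 2)))
    assert (p.x, p.y) == (1, 2)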
2 changes: 1 addition & 1 deletion hashstash/serializers/serializer.py
@@ -52,7 +52,7 @@ def deserialize(data, serializer: SERIALIZER_TYPES = None):
            log.trace(f"Deserialized with {deserializer_func.__name__}")
            return odata
        except Exception as e:
-           log.warning(f"Deserialization failed with {deserializer_func.__name__}: {str(e)}")
+           log.debug(f"Deserialization failed with {deserializer_func.__name__}: {str(e)}")
            raise e

4 changes: 2 additions & 2 deletions hashstash/utils/addrs.py
@@ -105,7 +105,7 @@ def flexible_import(obj_or_path):
            current += '.'
        except ImportError:
            if i == 0:
-               log.warning(f"Could not import module {current}")
+               log.debug(f"Could not import module {current}")
                return None
            try:
                obj = getattr(obj, part)
@@ -144,7 +144,7 @@ def get_class_src(cls):
            lines.extend(func_lines)
            lines.append("")  # Add an empty line after each method
        except OSError:
-           log.warning(f"Could not get source for method {name}")
+           log.debug(f"Could not get source for method {name}")

    src = "\n".join(lines)
    out = reformat_python_source(src)
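
The flexible_import hunk above imports as much of a dotted path as it can as modules, then resolves the remainder with getattr. A condensed sketch of the idea, assuming only importlib (not the library's exact signature or error handling):

    import importlib
    import logging

    log = logging.getLogger("hashstash")

    def flexible_import(path):
        """Resolve 'pkg.module.attr' by importing the longest module prefix,
        then walking the remaining parts with getattr."""
        parts = path.split(".")
        obj, consumed, current = None, 0, ""
        for i, part in enumerate(parts):
            current = f"{current}.{part}" if current else part
            try:
                obj = importlib.import_module(current)
                consumed = i + 1
            except ImportError:
                if i == 0:
                    log.debug(f"Could not import module {current}")
                    return None
                break
        for part in parts[consumed:]:
            try:
                obj = getattr(obj, part)
            except AttributeError:
                log.debug(f"Could not resolve attribute {part} on {obj!r}")
                return None
        return obj

    # e.g. flexible_import("os.path.join") returns the os.path.join function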
2 changes: 1 addition & 1 deletion hashstash/utils/dataframes.py
@@ -523,7 +523,7 @@ def set_index(
    except_columns={"_value"},
):
    if get_dataframe_engine(df) != "pandas":  # must be pandas
-       log.warning("can only set index on pandas df")
+       log.debug("can only set index on pandas df")
        return df

    assert index_columns or prefix_columns
2 changes: 1 addition & 1 deletion hashstash/utils/misc.py
@@ -76,7 +76,7 @@ def rmtreefn(dir_path):
            os.remove(dir_path)
            # log.info(f'Deleted temporary file: {dir_path}')
        else:
-           log.warning(f"Temporary path does not exist: {dir_path}")
+           log.debug(f"Temporary path does not exist: {dir_path}")
    except Exception as e:
        log.debug(f"Failed to delete temporary path {dir_path}: {e}")
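
The misc.py hunk sits inside a best-effort cleanup helper that removes a temporary path whether it is a file or a directory. A minimal sketch of that helper's shape, assuming shutil.rmtree for directories (details may differ from the real rmtreefn):

    import logging
    import os
    import shutil

    log = logging.getLogger("hashstash")

    def rmtreefn(dir_path):
        """Best-effort removal of a temporary file or directory tree."""
        try:
            if os.path.isdir(dir_path):
                shutil.rmtree(dir_path)
            elif os.path.isfile(dir_path):
                os.remove(dir_path)
            else:
                log.debug(f"Temporary path does not exist: {dir_path}")
        except Exception as e:
            log.debug(f"Failed to delete temporary path {dir_path}: {e}")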
