Skip to content

Commit

Permalink
change logger.info to logger.debug
Browse files Browse the repository at this point in the history
  • Loading branch information
simplymathematics committed Aug 4, 2024
1 parent 76f77ee commit c7dac03
Show file tree
Hide file tree
Showing 10 changed files with 26 additions and 26 deletions.
16 changes: 8 additions & 8 deletions deckard/base/attack/attack.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,13 +55,13 @@ def __hash__(self):
return int(my_hash(self), 16)

def __call__(self, model=None, data=None, attack_size=-1):
logger.info(f"Fitting attack {self.name} with id: {self.__hash__()}")
logger.debug(f"Fitting attack {self.name} with id: {self.__hash__()}")
name = self.name
kwargs = deepcopy(self.kwargs)
pop_list = ["extract", "poison", "evade", "reconstruct", "infer"]
for thing in pop_list:
kwargs.pop(thing, None)
logger.info(f"Initializing attack {name} with parameters {kwargs}")
logger.debug(f"Initializing attack {name} with parameters {kwargs}")
if "x_train" in kwargs:
assert (
data is not None
Expand Down Expand Up @@ -91,7 +91,7 @@ def __call__(self, model=None, data=None, attack_size=-1):
else:
kwargs["y_val"] = y_test
try:
logger.info("Attempting black-box attack.")
logger.debug("Attempting black-box attack.")
config = {"_target_": name}
config.update(**kwargs)
attack = instantiate(config, model)
Expand Down Expand Up @@ -134,7 +134,7 @@ def __init__(
self.attack_size = attack_size
self.init = AttackInitializer(model, name, **init)
self.kwargs = kwargs
logger.info("Instantiating Attack with id: {}".format(self.__hash__()))
logger.debug("Instantiating Attack with id: {}".format(self.__hash__()))

def __hash__(self):
return int(my_hash(self), 16)
Expand Down Expand Up @@ -300,7 +300,7 @@ def __init__(
self.attack_size = attack_size
self.init = AttackInitializer(model, name, **init)
self.kwargs = kwargs
logger.info("Instantiating Attack with id: {}".format(self.__hash__()))
logger.debug("Instantiating Attack with id: {}".format(self.__hash__()))

def __hash__(self):
return int(my_hash(self), 16)
Expand Down Expand Up @@ -493,7 +493,7 @@ def __init__(
self.attack_size = attack_size
self.init = AttackInitializer(model, name, **init)
self.kwargs = kwargs
logger.info("Instantiating Attack with id: {}".format(self.__hash__()))
logger.debug("Instantiating Attack with id: {}".format(self.__hash__()))

def __hash__(self):
return int(my_hash(self), 16)
Expand Down Expand Up @@ -618,7 +618,7 @@ def __init__(
f"kwargs must be of type DictConfig or dict. Got {type(kwargs)}",
)
self.kwargs = kwargs
logger.info("Instantiating Attack with id: {}".format(self.__hash__()))
logger.debug("Instantiating Attack with id: {}".format(self.__hash__()))

def __hash__(self):
return int(my_hash(self), 16)
Expand Down Expand Up @@ -813,7 +813,7 @@ def __init__(
kwargs.update(**kwargs.pop("kwargs"))
self.kwargs = kwargs
self.name = name if name is not None else my_hash(self)
logger.info("Instantiating Attack with id: {}".format(self.name))
logger.debug("Instantiating Attack with id: {}".format(self.name))

def __call__(
self,
Expand Down
2 changes: 1 addition & 1 deletion deckard/base/data/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ def save(self, data, filename):
:param filename: str
"""
if filename is not None:
logger.info(f"Saving data to {filename}")
logger.debug(f"Saving data to {filename}")
suffix = Path(filename).suffix
Path(filename).parent.mkdir(parents=True, exist_ok=True)
if isinstance(data, dict):
Expand Down
6 changes: 3 additions & 3 deletions deckard/base/data/generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ class SklearnDataGenerator:
kwargs: dict = field(default_factory=dict)

def __init__(self, name, **kwargs):
logger.info(
logger.debug(
f"Instantiating {self.__class__.__name__} with name={name} and kwargs={kwargs}",
)
self.name = name
Expand Down Expand Up @@ -91,7 +91,7 @@ class TorchDataGenerator:
kwargs: dict = field(default_factory=dict)

def __init__(self, name, path=None, **kwargs):
logger.info(
logger.debug(
f"Instantiating {self.__class__.__name__} with name={name} and kwargs={kwargs}",
)
self.name = name
Expand Down Expand Up @@ -179,7 +179,7 @@ class KerasDataGenerator:
kwargs: dict = field(default_factory=dict)

def __init__(self, name, **kwargs):
logger.info(
logger.debug(
f"Instantiating {self.__class__.__name__} with name={name} and kwargs={kwargs}",
)
self.name = name
Expand Down
2 changes: 1 addition & 1 deletion deckard/base/data/sampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def __init__(
self.time_series = time_series

def __call__(self, X, y):
logger.info(f"Calling SklearnDataSampler with params {asdict(self)}")
logger.debug(f"Calling SklearnDataSampler with params {asdict(self)}")
params = deepcopy(asdict(self))
stratify = params.pop("stratify", False)
if stratify is True:
Expand Down
2 changes: 1 addition & 1 deletion deckard/base/data/sklearn_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def __iter__(self):
return iter(self.pipeline)

def __call__(self, X_train, X_test, y_train, y_test):
logger.info(
logger.debug(
"Calling SklearnDataPipeline with pipeline={}".format(self.pipeline),
)
pipeline = deepcopy(self.pipeline)
Expand Down
6 changes: 3 additions & 3 deletions deckard/base/experiment/experiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ def __call__(self, **kwargs):
:type scorer: str
:return: The score for the specified scorer or the status of the experiment if scorer=None (default).
"""
logger.info("Running experiment with id: {}".format(self.get_name()))
logger.debug("Running experiment with id: {}".format(self.get_name()))
# Setup files, data, and model
files = deepcopy(self.files).get_filenames()

Expand Down Expand Up @@ -236,8 +236,8 @@ def __call__(self, **kwargs):
self.data.save(score_dict, files["score_dict_file"])
else: # pragma: no cover
raise ValueError("Scorer is None. Please specify a scorer.")
logger.info(f"Score for id : {self.get_name()}: {score_dict}")
logger.info("Finished running experiment with id: {}".format(self.get_name()))
logger.debug(f"Score for id : {self.get_name()}: {score_dict}")
logger.debug("Finished running experiment with id: {}".format(self.get_name()))
return score_dict

def _set_name(self):
Expand Down
2 changes: 1 addition & 1 deletion deckard/base/model/art_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ def __call__(self):
data = [torch.from_numpy(d).to(device) for d in data]
data = [d.to(device) for d in data]
model.to(device)
logger.info(f"Model moved to GPU: {device}")
logger.debug(f"Model moved to GPU: {device}")
else:
device = torch.device("cpu")
model = TorchInitializer(
Expand Down
4 changes: 2 additions & 2 deletions deckard/base/model/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def __init__(self, **kwargs):
self.kwargs = kwargs

def __call__(self, data: list, model: object, library=None):
logger.info(f"Training model {model} with fit params: {self.kwargs}")
logger.debug(f"Training model {model} with fit params: {self.kwargs}")
device = str(model.device) if hasattr(model, "device") else "cpu"
trainer = self.kwargs
if library in sklearn_dict.keys():
Expand All @@ -91,7 +91,7 @@ def __call__(self, data: list, model: object, library=None):
try:
start = process_time_ns()
start_timestamp = time()
logger.info(f"Fitting type(model): {type(model)} with kwargs {trainer}")
logger.debug(f"Fitting type(model): {type(model)} with kwargs {trainer}")
model.fit(data[0], data[2], **trainer)
end = process_time_ns()
end_timestamp = time()
Expand Down
8 changes: 4 additions & 4 deletions deckard/base/model/tensorflow_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def __call__(self):
import tensorflow as tf

tf.config.run_functions_eagerly(True)
logger.info(f"Initializing model {self.name} with kwargs {self.kwargs}")
logger.debug(f"Initializing model {self.name} with kwargs {self.kwargs}")
if len(self.kwargs) > 0:
config = {"class_name": self.name, "config": self.kwargs}
else:
Expand Down Expand Up @@ -159,7 +159,7 @@ def __call__(self):
import tensorflow as tf

tf.config.run_functions_eagerly(True)
logger.info(f"Initializing model {self.name} with kwargs {self.kwargs}")
logger.debug(f"Initializing model {self.name} with kwargs {self.kwargs}")
if "kwargs" in self.kwargs:
kwargs = self.kwargs.pop("kwargs", {})
params = self.kwargs
Expand All @@ -184,7 +184,7 @@ def __call__(self):
import tensorflow as tf

tf.config.run_functions_eagerly(True)
logger.info(f"Initializing model {self.name} with kwargs {self.kwargs}")
logger.debug(f"Initializing model {self.name} with kwargs {self.kwargs}")
if "kwargs" in self.kwargs:
kwargs = self.kwargs.pop("kwargs", {})
params = self.kwargs
Expand Down Expand Up @@ -296,7 +296,7 @@ def __init__(self, name, **kwargs):
def __call__(self):
import tensorflow as tf

logger.info(f"Initializing model {self.name} with kwargs {self.kwargs}")
logger.debug(f"Initializing model {self.name} with kwargs {self.kwargs}")
if "kwargs" in self.kwargs:
kwargs = self.kwargs.pop("kwargs", {})
params = self.kwargs
Expand Down
4 changes: 2 additions & 2 deletions deckard/base/model/torch_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def __init__(self, name, **kwargs):
self.kwargs = kwargs

def __call__(self):
logger.info(f"Initializing model {self.name} with kwargs {self.kwargs}")
logger.debug(f"Initializing model {self.name} with kwargs {self.kwargs}")
params = self.kwargs
name = params.pop("_target_", self.name)
dict_ = {"_target_": name}
Expand All @@ -54,7 +54,7 @@ def __init__(self, name, **kwargs):
self.kwargs = kwargs

def __call__(self, model):
logger.info(f"Initializing model {self.name} with kwargs {self.kwargs}")
logger.debug(f"Initializing model {self.name} with kwargs {self.kwargs}")
params = self.kwargs
name = params.pop("_target_", self.name)
dict_ = {"_target_": name}
Expand Down

0 comments on commit c7dac03

Please sign in to comment.