Test layers #148

Closed
wants to merge 11 commits
7 changes: 6 additions & 1 deletion deckard/base/files/files.py
@@ -3,6 +3,7 @@
from pathlib import Path
from typing import Dict
from copy import deepcopy
from omegaconf import ListConfig

from ..utils import my_hash

@@ -72,7 +73,8 @@ def __init__(
else None
)
self.name = name if name else None
self.stage = stage if stage else None
stage = stage if stage else None
self.stage = stage[-1] if isinstance(stage, (list, ListConfig)) else stage
self.files = files if files else {}
logger.debug(f"FileConfig init: {self.files}")

@@ -116,6 +118,9 @@ def _set_filenames(self, **kwargs):
if attack_dir is not None
else None
)
print("reports", reports)
print("stage", stage)
input("Press Enter to continue...")
if name is None and stage is None:
path = Path(reports)
elif name is not None and stage is None:
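For context on the files.py change above: the constructor now tolerates a stage passed as a Python list or an OmegaConf ListConfig (as happens with Hydra multi-stage overrides) and keeps only the last entry. A minimal sketch of that behaviour, using names from the diff; the helper function itself is hypothetical and only mirrors the new logic:

```python
from omegaconf import OmegaConf, ListConfig

def resolve_stage(stage):
    # Mirrors the new FileConfig logic: fall back to None, then collapse a
    # list-valued stage to its last element.
    stage = stage if stage else None
    return stage[-1] if isinstance(stage, (list, ListConfig)) else stage

print(resolve_stage(OmegaConf.create(["train", "attack"])))  # -> "attack"
print(resolve_stage("train"))                                # -> "train"
print(resolve_stage(None))                                   # -> None
```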
26 changes: 13 additions & 13 deletions deckard/base/model/model.py
@@ -133,7 +133,7 @@ def __call__(self, data: list, model: object, library=None):
start = process_time_ns()
start_timestamp = time()
model.fit(data[0], data[2], **trainer)
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
except np.AxisError: # pragma: no cover
from art.utils import to_categorical
@@ -142,7 +142,7 @@ def __call__(self, data: list, model: object, library=None):
start = process_time_ns()
start_timestamp = time()
model.fit(data[0], data[2], **trainer)
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
except ValueError as e: # pragma: no cover
if "Shape of labels" in str(e):
@@ -153,7 +153,7 @@ def __call__(self, data: list, model: object, library=None):
start = process_time_ns()
start_timestamp = time()
model.fit(data[0], data[2], **trainer)
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
else:
raise e
@@ -165,7 +165,7 @@ def __call__(self, data: list, model: object, library=None):
start = process_time_ns()
start_timestamp = time()
model.fit(data[0], data[2], **trainer)
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
except Exception as e:
raise e
@@ -177,7 +177,7 @@ def __call__(self, data: list, model: object, library=None):
start = process_time_ns()
start_timestamp = time()
model.fit(data[0], data[2], **trainer)
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
elif "should be the same" in str(e).lower():
import torch
@@ -197,7 +197,7 @@ def __call__(self, data: list, model: object, library=None):
start = process_time_ns()
start_timestamp = time()
model.fit(data[0], data[2], **trainer)
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
else:
raise e
@@ -564,7 +564,7 @@ def predict(self, data=None, model=None, predictions_file=None):
start = process_time_ns()
start_timestamp = time()
predictions = model.predict(data[1])
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
except NotFittedError as e: # pragma: no cover
logger.warning(e)
@@ -582,7 +582,7 @@ def predict(self, data=None, model=None, predictions_file=None):
except Exception as e: # pragma: no cover
logger.error(e)
raise e
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
if predictions_file is not None:
self.data.save(predictions, predictions_file)
@@ -630,13 +630,13 @@ def predict_proba(self, data=None, model=None, probabilities_file=None):
start = process_time_ns()
start_timestamp = time()
predictions = model.predict_proba(data[1])
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
else:
start = process_time_ns()
start_timestamp = time()
predictions = model.predict(data[1])
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
if probabilities_file is not None:
self.data.save(predictions, probabilities_file)
@@ -683,19 +683,19 @@ def predict_log_loss(self, data, model, losses_file=None):
start = process_time_ns()
start_timestamp = time()
predictions = model.predict_log_proba(data[1])
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
elif hasattr(model, "predict_proba"):
start = process_time_ns()
start_timestamp = time()
predictions = model.predict_proba(data[1])
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
elif hasattr(model, "predict"):
start = process_time_ns()
start_timestamp = time()
predictions = model.predict(data[1])
end = process_time_ns() - start
end = process_time_ns()
end_timestamp = time()
else: # pragma: no cover
raise ValueError(
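A note on the timing change repeated throughout model.py above: `end` previously stored an elapsed duration (`process_time_ns() - start`), whereas after this PR it stores the raw end counter, so any duration presumably has to be derived later from the two values. A minimal sketch of the resulting pattern, with the fit call stubbed out as a comment:

```python
from time import process_time_ns, time

start = process_time_ns()
start_timestamp = time()
# ... model.fit(data[0], data[2], **trainer) would run here ...
end = process_time_ns()          # raw end counter, no longer a duration
end_timestamp = time()

train_time_ns = end - start                      # CPU time spent fitting
wall_time_s = end_timestamp - start_timestamp    # wall-clock time spent fitting
```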
43 changes: 24 additions & 19 deletions deckard/layers/experiment.py
@@ -3,11 +3,11 @@
import dvc.api
from hydra.utils import instantiate

from dulwich.errors import NotGitRepository
import yaml
import argparse
from copy import deepcopy
from ..base.utils import unflatten_dict
from .utils import save_params_file

logger = logging.getLogger(__name__)

@@ -27,8 +27,9 @@ def get_dvc_stage_params(
name=None,
):
logger.info(
f"Getting params for stage {stage} from {params_file} and {pipeline_file} in {directory}.",
f"Getting params for stage {stage} from {params_file} and {pipeline_file} in {Path(directory).resolve().as_posix()}.",
)
stage = [stage] if not isinstance(stage, list) else stage
params = dvc.api.params_show(stages=stage)
params.update({"_target_": "deckard.base.experiment.Experiment"})
files = dvc.api.params_show(pipeline_file, stages=stage, repo=directory)
@@ -69,14 +70,9 @@ def run_stage(


def get_stages(pipeline_file="dvc.yaml", stages=None, repo=None):
try:
def_stages = list(
dvc.api.params_show(pipeline_file, repo=repo)["stages"].keys(),
)
except NotGitRepository:
raise ValueError(
f"Directory {repo} is not a git repository. Please run `dvc init` in {repo} and try again.",
)
with Path(repo, pipeline_file).open("r") as f:
pipeline = yaml.safe_load(f)["stages"]
def_stages = list(pipeline.keys())
if stages is None or stages == []:
raise ValueError(f"Please specify one or more stage(s) from {def_stages}")
elif isinstance(stages, str):
@@ -111,23 +107,32 @@ def run_stages(stages, pipeline_file="dvc.yaml", params_file="params.yaml", repo
dvc_parser.add_argument("--verbosity", type=str, default="INFO")
dvc_parser.add_argument("--params_file", type=str, default="params.yaml")
dvc_parser.add_argument("--pipeline_file", type=str, default="dvc.yaml")
dvc_parser.add_argument("--config_dir", type=str, default="conf")
dvc_parser.add_argument("--config_dir", type=str, default=None)
dvc_parser.add_argument("--config_file", type=str, default="default")
dvc_parser.add_argument("--workdir", type=str, default=".")
dvc_parser.add_argument("--dvc_repository", type=str, default=None)
args = dvc_parser.parse_args()
config_dir = Path(args.workdir, args.config_dir).resolve().as_posix()
# save_params_file(
# config_dir=config_dir,
# config_file=args.config_file,
# params_file=args.params_file,
# )
if args.config_dir is not None:
args.config_dir = Path(args.config_dir).resolve().as_posix()
assert args.config_file is not None, "Please specify a config file."
if (
args.config_dir is not None
and Path(args.config_dir, args.config_file).is_file()
):
save_params_file(
config_dir=args.config_dir,
config_file=args.config_file,
params_file=args.params_file,
)
else:
pass
logging.basicConfig(
level=args.verbosity,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
)
dvc_repository = args.dvc_repository if args.dvc_repository is not None else "."
results = run_stages(
stages=args.stage,
pipeline_file=args.pipeline_file,
params_file=args.params_file,
repo=args.workdir,
repo=dvc_repository,
)
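The experiment.py changes above replace the dvc.api / dulwich-based stage discovery with a direct read of dvc.yaml, so the layer no longer requires the working directory to be a git repository, and the CLI now calls save_params_file only when a config directory and config file are actually supplied. A minimal sketch of the new stage discovery, with a hypothetical repo layout and stage names:

```python
from pathlib import Path
import yaml

def list_stages(repo=".", pipeline_file="dvc.yaml"):
    # Read the pipeline definition directly; no git repo or dvc.api call needed.
    with Path(repo, pipeline_file).open("r") as f:
        pipeline = yaml.safe_load(f)["stages"]
    return list(pipeline.keys())

# e.g. list_stages(".") might return ["train", "attack", "report"]
```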
83 changes: 26 additions & 57 deletions deckard/layers/utils.py
@@ -3,13 +3,12 @@

from hydra.errors import OverrideParseException
from omegaconf import OmegaConf
from copy import deepcopy
import yaml
from hydra import initialize_config_dir, compose


from numpy import nan
from ..base.utils import my_hash
from ..base.utils import my_hash, flatten_dict

logger = logging.getLogger(__name__)

@@ -58,78 +57,48 @@ def find_conf_files(
return files


def get_overrides(file: str, key: str = None, overrides=None):
if overrides is None:
overrides = {}
else:
if isinstance(overrides, str):
overrides = overrides.split(",")
if isinstance(overrides, list):
overrides = {
entry.split("=")[0]: entry.split("=")[1] for entry in overrides
}
if isinstance(overrides, dict):
new_dict = deepcopy(overrides)
for k, v in new_dict.items():
if k.startswith("++"):
overrides[k] = v
elif k.startswith("+"):
overrides[f"++{k[1:]}"] = v
elif k.startswith("~~"):
overrides[f"~~{k[2:]}"] = v
else:
overrides[f"++{k}"] = v

# assert isinstance(overrides, dict), f"Expected list, got {type(overrides)}"
# if key is not None and len(overrides) > 0:
# overrides.pop(f"{key}.name", None)
# overrides.pop(f"files.{key}_file", None)
# overrides[f"++{key}.name"] = Path(file).stem
# overrides[f"++files.{key}_file"] = Path(file).stem
# overrides[f"{key}"] = Path(file).stem
# overrides["++stage"] = key
def get_overrides(file: str, folder, overrides=None):
with open(Path(folder, file), "r") as f:
old_cfg = yaml.safe_load(f)
old_cfg = OmegaConf.create(old_cfg)
old_cfg = OmegaConf.to_container(old_cfg, resolve=True)
flat_cfg = flatten_dict(old_cfg)
overrides = [] if overrides is None else overrides
if isinstance(overrides, str):
overrides = overrides.split(",")
assert isinstance(overrides, list), f"Expected list, got {type(overrides)}"
new_overrides = []
for override in overrides:
k, v = override.split("=")
if k in flat_cfg:
k = f"++{k}"
elif k not in flat_cfg and not k.startswith("+"):
k = f"+{k}"
else:
pass
new_overrides.append(f"{k}={v}")
overrides = new_overrides
return overrides
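The rewritten get_overrides above flattens the YAML config referenced by `file` and then rewrites each `key=value` override with Hydra's prefix grammar: `++` when the key already exists in the config, `+` when it does not. A standalone sketch of that prefixing step, with a hypothetical flattened config and override list:

```python
# Hypothetical inputs; the loop body matches the new get_overrides logic.
overrides = ["model.init.name=torch_example.ResNet18", "new_key=1"]
flat_cfg = {"model.init.name": "torch_example.ResNet18", "data.name": "mnist"}

new_overrides = []
for override in overrides:
    k, v = override.split("=")
    if k in flat_cfg:
        k = f"++{k}"          # key exists in the config: force-override it
    elif not k.startswith("+"):
        k = f"+{k}"           # key is new: append it to the config
    new_overrides.append(f"{k}={v}")

print(new_overrides)
# ['++model.init.name=torch_example.ResNet18', '+new_key=1']
```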


def compose_experiment(file, config_dir, overrides=None, default_file="default.yaml"):
if hasattr(file, "as_posix"):
file = file.as_posix()
if overrides in [None, "", "None", "none", "NONE", "null", "Null", "NULL"]:
overrides = []
elif isinstance(overrides, str):
overrides = overrides.split(",")
if isinstance(overrides, list):
pass
elif isinstance(overrides, dict):
new_dict = deepcopy(overrides)
for k, v in new_dict.items():
if k.startswith("++"):
overrides[k] = v
elif k.startswith("+"):
overrides[f"++{k[1:]}"] = v
elif k.startswith("--"):
overrides[f"++{k[2:]}"] = v
else:
overrides[f"++{k}"] = v
else:
raise TypeError(f"Expected list or dict, got {type(overrides)}")
assert isinstance(file, str), f"Expected str, got {type(file)}"
# file = Path(data_conf_dir, file).as_posix()
overrides = get_overrides(file=file, folder=config_dir, overrides=overrides)
logger.info(f"Running experiment in config_dir: {config_dir}")
logger.info(f"Running experiment with config_name: {file}")
config_dir = Path(Path(), config_dir).resolve().as_posix()
OmegaConf.register_new_resolver("eval", eval)
with initialize_config_dir(config_dir=config_dir, version_base="1.3"):
try:
cfg = compose(config_name=Path(default_file).stem, overrides=overrides)
except OverrideParseException:
except OverrideParseException: # pragma: no cover
raise ValueError(f"Failed to parse overrides: {overrides}")
cfg = OmegaConf.to_container(cfg, resolve=True)
cfg["_target_"] = "deckard.Experiment"
id_ = str(my_hash(cfg))
cfg["name"] = id_
cfg["files"]["name"] = id_
return cfg
cfg = OmegaConf.create(cfg)
return cfg


def save_params_file(
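compose_experiment now routes its overrides through the new get_overrides, composes the Hydra config from config_dir, names the experiment by its hash, and returns an OmegaConf object instead of a plain dict. A hypothetical usage sketch; the config directory, file name, and override value are assumptions, and the named file must exist under config_dir:

```python
from deckard.layers.utils import compose_experiment

cfg = compose_experiment(
    file="default.yaml",            # config whose keys decide ++ vs + prefixing
    config_dir="conf",              # Hydra config directory (assumed layout)
    overrides="data.name=mnist",    # a comma-separated string is accepted
    default_file="default.yaml",
)
print(cfg["name"])           # hash of the composed config
print(cfg["files"]["name"])  # same hash, reused for output file naming
```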