diff --git a/milabench/testing.py b/milabench/testing.py
index 9dcd455ee..aa77d1645 100644
--- a/milabench/testing.py
+++ b/milabench/testing.py
@@ -11,6 +11,26 @@ from milabench.utils import multilogger, validation_layers
 
 
+here = Path(__file__).parent
+
+
+def official_config(name):
+    p = here / ".." / "config" / f"{name}.yaml"
+
+    if p.exists():
+        return p
+
+    raise FileNotFoundError(f"{p} does not exist")
+
+
+def resolved_config(name):
+    from .config import build_config
+
+    p = official_config(name)
+
+    return build_config(p)
+
+
 class ReplayPackage(BasePackage):
     """Disable some folder creation for replay purposes"""
 
 
diff --git a/tests/conftest.py b/tests/conftest.py
index 3d021c570..8e6d6ce76 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -2,6 +2,8 @@
 import stat
 from pathlib import Path
 
+
+from milabench.testing import official_config
 import pytest
 import voir.instruments.gpu as voirgpu
 
@@ -21,20 +23,11 @@ def runs_folder():
 def config():
     def get_config(name):
         return here / "config" / f"{name}.yaml"
-
-    return get_config
-
-
-
-@pytest.fixture
-def official_config():
-    def get_config(name):
-        return here / ".." / "config" / f"{name}.yaml"
 
     return get_config
 
 
 @pytest.fixture
-def standard_config(official_config):
+def standard_config():
     return official_config("standard")
 
diff --git a/tests/test_mock.py b/tests/test_mock.py
index 69ab8e2ca..dc97a519f 100644
--- a/tests/test_mock.py
+++ b/tests/test_mock.py
@@ -1,20 +1,9 @@
+import milabench.alt_async
 import milabench.commands.executors
+from milabench.testing import resolved_config
 
 
-import traceback
-from pytest import fixture
-
-
-@fixture
-def args(standard_config, tmp_path):
-    return [
-        "--base", str(tmp_path),
-        "--config", str(standard_config)
-    ]
-
-
-async def mock_exec(command, phase="run", timeout=False, timeout_delay=600, **kwargs):
-    return [0]
+import pytest
 
 
 def run_cli(*args):
@@ -27,18 +16,45 @@
         assert not exc.code
 
 
-def test_milabench(monkeypatch, args):
-    monkeypatch.setenv("MILABENCH_GPU_ARCH", "cuda")
-    monkeypatch.setattr(milabench.commands, "execute_command", mock_exec)
+def benchlist(enabled=True):
+    standard = resolved_config("standard")
 
-    run_cli("install", *args)
+    for key, value in standard.items():
+        if value.get("enabled", False):
+            if key[0] != "_":
+                yield key
 
-    run_cli("prepare", *args)
+
+@pytest.fixture
+def args(standard_config, tmp_path):
+    return [
+        "--base", str(tmp_path),
+        "--config", str(standard_config)
+    ]
+
+
+def mock_voir_run(argv, info, timeout=None, constructor=None, env=None, **options):
+    from voir.proc import Multiplexer
+    mp = Multiplexer(timeout=timeout, constructor=constructor)
+    mp.start(["sleep", "1"], info=info, env=env, **options)
+    return mp
+
+
+@pytest.mark.parametrize("bench", benchlist())
+def test_milabench(monkeypatch, args, bench):
+    from milabench.cli.dry import assume_gpu
+
+    monkeypatch.setenv("MILABENCH_GPU_ARCH", "cuda")
+
+    run_cli("install", *args, "--select", bench)
+
+    run_cli("prepare", *args, "--select", bench)
 
     #
     # use Mock GPU-SMI
     #
-    monkeypatch.setenv("MILABENCH_GPU_ARCH", "mock")
-    from milabench.cli.dry import assume_gpu
-    with assume_gpu(8):
-        run_cli("run", *args, "--no-report")
+    with monkeypatch.context() as ctx:
+        ctx.setattr(milabench.alt_async, "voir_run", mock_voir_run)
+        ctx.setenv("MILABENCH_GPU_ARCH", "mock")
+        with assume_gpu(8):
+            run_cli("run", *args, "--no-report", "--select", bench)
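
Not part of the patch: a minimal standalone sketch of the selection logic the new `benchlist()` generator applies to the resolved `standard` config before it is handed to `pytest.mark.parametrize`. The `sample` dict below is hypothetical; in the patch the entries come from `config/standard.yaml` via `milabench.testing.resolved_config`.

```python
# Hypothetical sample standing in for resolved_config("standard"); the real
# mapping is built from config/standard.yaml by milabench.config.build_config.
sample = {
    "_defaults": {"enabled": True},   # leading "_" marks a private entry: skipped
    "bert": {"enabled": True},        # enabled benchmark: yielded
    "resnet50": {"enabled": False},   # disabled benchmark: skipped
}


def benchlist(standard):
    """Yield the names of enabled, non-private benchmarks."""
    for key, value in standard.items():
        if value.get("enabled", False) and key[0] != "_":
            yield key


assert list(benchlist(sample)) == ["bert"]
```

Because pytest evaluates the parameter set at collection time, each enabled benchmark becomes its own test case, and passing `--select bench` keeps the install, prepare, and run steps scoped to that single benchmark.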