Skip to content

Commit

Permalink
Run mock milabench on a per-bench basis to identify issues more easily
Browse files Browse the repository at this point in the history
  • Loading branch information
pierre.delaunay committed Jul 9, 2024
1 parent 575ba9f commit 078e317
Show file tree
Hide file tree
Showing 3 changed files with 62 additions and 33 deletions.
20 changes: 20 additions & 0 deletions milabench/testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,26 @@
from milabench.utils import multilogger, validation_layers


# Directory containing this module; official configs live one level up.
here = Path(__file__).parent


def official_config(name):
    """Return the path to the official config ``config/<name>.yaml``.

    Raises:
        FileNotFoundError: if no config file with that name exists.
    """
    path = here / ".." / "config" / f"{name}.yaml"

    if not path.exists():
        raise FileNotFoundError(f"{path} does not exist")

    return path


def resolved_config(name):
    """Look up the official config *name* and build it into a resolved config."""
    # Imported lazily to avoid a circular import at module load time.
    from .config import build_config

    return build_config(official_config(name))


class ReplayPackage(BasePackage):
"""Disable some folder creation for replay purposes"""

Expand Down
13 changes: 3 additions & 10 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
import stat
from pathlib import Path


from milabench.testing import official_config
import pytest
import voir.instruments.gpu as voirgpu

Expand All @@ -21,20 +23,11 @@ def runs_folder():
def config():
    """Fixture factory resolving a test-config name to tests/config/<name>.yaml."""
    def resolve(name):
        return here / "config" / f"{name}.yaml"

    return resolve



@pytest.fixture
def official_config():
def get_config(name):
return here / ".." / "config" / f"{name}.yaml"
return get_config


@pytest.fixture
def standard_config():
    """Path to the official "standard" benchmark configuration file."""
    return official_config("standard")


Expand Down
62 changes: 39 additions & 23 deletions tests/test_mock.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,9 @@

import milabench.alt_async
import milabench.commands.executors
from milabench.testing import resolved_config

import traceback
from pytest import fixture


@fixture
def args(standard_config, tmp_path):
return [
"--base", str(tmp_path),
"--config", str(standard_config)
]


async def mock_exec(command, phase="run", timeout=False, timeout_delay=600, **kwargs):
return [0]
import pytest


def run_cli(*args):
Expand All @@ -27,18 +16,45 @@ def run_cli(*args):
assert not exc.code


def test_milabench(monkeypatch, args):
monkeypatch.setenv("MILABENCH_GPU_ARCH", "cuda")
monkeypatch.setattr(milabench.commands, "execute_command", mock_exec)
def benchlist(enabled=True):
    """Yield the names of benches in the standard config whose "enabled"
    flag matches *enabled*.

    Internal entries (keys starting with ``_``) are always skipped.

    Fixes: the original accepted an ``enabled`` parameter but never used
    it — it unconditionally yielded only enabled benches. Default behavior
    (``enabled=True``) is unchanged; ``enabled=False`` now yields the
    disabled benches. Also uses ``startswith`` instead of ``key[0]`` so an
    empty key cannot raise IndexError.
    """
    standard = resolved_config("standard")

    for key, value in standard.items():
        if key.startswith("_"):
            continue
        if bool(value.get("enabled", False)) == enabled:
            yield key

run_cli("prepare", *args)

@pytest.fixture
def args(standard_config, tmp_path):
    """Common milabench CLI arguments: a temporary base dir and the standard config."""
    return [
        "--base", str(tmp_path),
        "--config", str(standard_config)
    ]


def mock_voir_run(argv, info, timeout=None, constructor=None, env=None, **options):
    """Stand-in for ``voir_run``: launches a short ``sleep`` instead of the
    real benchmark process, returning the started Multiplexer.

    Note: *argv* is deliberately ignored — the mock always runs ``sleep 1``.
    """
    from voir.proc import Multiplexer

    multiplexer = Multiplexer(timeout=timeout, constructor=constructor)
    multiplexer.start(["sleep", "1"], info=info, env=env, **options)
    return multiplexer


@pytest.mark.parametrize("bench", benchlist())
def test_milabench(monkeypatch, args, bench):
    """End-to-end smoke test: install, prepare and run a single bench with
    a mocked GPU backend so no real hardware or workload is required."""
    from milabench.cli.dry import assume_gpu

    # install/prepare run as if on a CUDA machine
    monkeypatch.setenv("MILABENCH_GPU_ARCH", "cuda")

    run_cli("install", *args, "--select", bench)

    run_cli("prepare", *args, "--select", bench)

    #
    # use Mock GPU-SMI
    #
    # NOTE(review): the block below (mock env + run without --select) looks
    # like a leftover of an earlier version duplicated by the second run
    # further down — confirm whether both invocations are intended.
    monkeypatch.setenv("MILABENCH_GPU_ARCH", "mock")
    from milabench.cli.dry import assume_gpu
    with assume_gpu(8):
        run_cli("run", *args, "--no-report")
    with monkeypatch.context() as ctx:
        # replace voir_run so the "run" phase only spawns a short sleep
        ctx.setattr(milabench.alt_async, "voir_run", mock_voir_run)
        ctx.setenv("MILABENCH_GPU_ARCH", "mock")
        # pretend 8 GPUs are available for the run phase
        with assume_gpu(8):
            run_cli("run", *args, "--no-report", "--select", bench)

0 comments on commit 078e317

Please sign in to comment.