diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 825c86f4b..8ec9ec9c6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,6 +5,8 @@ on: branches: [ vara, vara-dev ] pull_request: branches: [ vara, vara-dev ] + merge_group: + branches: [ vara, vara-dev ] env: BB_TMP_DIR: $(pwd)/benchbuild/tmp diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 8819a8d76..852534346 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -3,6 +3,8 @@ name: Docs CI on: pull_request: branches: [ vara, vara-dev ] + merge_group: + branches: [ vara, vara-dev ] jobs: build: diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 1fdde6ade..a85d4f1a7 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -2,6 +2,7 @@ name: pre-commit on: pull_request: + merge_group: jobs: pre-commit: diff --git a/.github/workflows/reviewdog.yml b/.github/workflows/reviewdog.yml index 4ad4fdd5e..97ce39a28 100644 --- a/.github/workflows/reviewdog.yml +++ b/.github/workflows/reviewdog.yml @@ -3,6 +3,8 @@ name: reviewdog on: pull_request: branches: [ vara, vara-dev ] + merge_group: + branches: [ vara, vara-dev ] jobs: reviewdog: diff --git a/.github/workflows/tag.yml b/.github/workflows/tag.yml index b05e12d32..31c3265ee 100644 --- a/.github/workflows/tag.yml +++ b/.github/workflows/tag.yml @@ -17,6 +17,11 @@ jobs: - uses: actions/checkout@v2 with: fetch-depth: 0 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: '3.11' + - uses: ./.github/actions/dependenciesActions - name: Create and Publish Release run: | diff --git a/docs/source/conf.py b/docs/source/conf.py index ef57a97cf..54392b0f2 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -87,6 +87,10 @@ # modules that require this module before setting the type checking flag. 
import scipy.stats # isort:skip +# Matplotlib >=3.8 has a type-checking-flag-guarded import of a symbol that does +# not exist in the shipped version. +import matplotlib.pyplot # isort:skip + # The autodocs typehints plugin does not resolve circular imports caused by type # annotations, so we have to manually break the circles. import rich.console # isort:skip diff --git a/docs/source/tutorials/configuration_specific_experiments.rst b/docs/source/tutorials/configuration_specific_experiments.rst index 16479538c..77f804b6a 100644 --- a/docs/source/tutorials/configuration_specific_experiments.rst +++ b/docs/source/tutorials/configuration_specific_experiments.rst @@ -21,6 +21,7 @@ One just needs to extend the case-study file of a project with a yaml document t .. code-block:: yaml --- + config_type: PlainCommandlineConfiguration 0: '["--foo", "--bar"]' 1: '["--foo"]' ... diff --git a/docs/source/vara-ts-api/tools/vara-cs-gui.rst b/docs/source/vara-ts-api/tools/vara-cs-gui.rst index 96c3493a7..dcdd6219c 100644 --- a/docs/source/vara-ts-api/tools/vara-cs-gui.rst +++ b/docs/source/vara-ts-api/tools/vara-cs-gui.rst @@ -10,6 +10,7 @@ The gui is started by:: The gui provides 3 Strategies to generate case studies: - Manual revision selection: Select revision from the revision history of a project. Multiple revisions can be selected by holding `ctrl` and ranges by holding `shift`. Revisions which are blocked because of bugs in the compilation of the project are marked blue. + .. figure:: vara-cs-gui-manual.png - Random Sampling: Sample a number of revisions using a random a Normal or HalfNormal Distribution. 
diff --git a/requirements.txt b/requirements.txt index 8f5185378..1a3860088 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -benchbuild>=6.6.4 +benchbuild>=6.8 click>=8.1.3 distro>=1.5.0 graphviz>=0.14.2 diff --git a/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthIPRuntime_0.case_study b/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthIPRuntime_0.case_study new file mode 100644 index 000000000..723eb0149 --- /dev/null +++ b/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthIPRuntime_0.case_study @@ -0,0 +1,25 @@ +--- +DocType: CaseStudy +Version: 1 +... +--- +project_name: SynthIPRuntime +stages: +- revisions: + - commit_hash: 793035062810ea3a2d9a10f831cd199fbbb82090 + commit_id: 64 + config_ids: + - 0 + - 1 + - 2 + - 3 + - 4 +version: 0 +... +--- +config_type: PlainCommandlineConfiguration +0: '["-d"]' +1: '["-c"]' +2: '["-c", "-1"]' +3: '["-c", "-2"]' +4: '["-c", "-1", "-2"]' diff --git a/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthIPTemplate_0.case_study b/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthIPTemplate_0.case_study new file mode 100644 index 000000000..f37a50f77 --- /dev/null +++ b/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthIPTemplate_0.case_study @@ -0,0 +1,25 @@ +--- +DocType: CaseStudy +Version: 1 +... +--- +project_name: SynthIPTemplate +stages: +- revisions: + - commit_hash: 793035062810ea3a2d9a10f831cd199fbbb82090 + commit_id: 64 + config_ids: + - 0 + - 1 + - 2 + - 3 + - 4 +version: 0 +... 
+--- +config_type: PatchConfiguration +0: '["Decompress"]' +1: '["Compress", "no_fastmode", "no_smallmode"]' +2: '["Compress", "fastmode", "no_smallmode"]' +3: '["Compress", "no_fastmode", "smallmode"]' +4: '["Compress", "fastmode", "smallmode"]' diff --git a/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthSAContextSensitivity_0.case_study b/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthSAContextSensitivity_0.case_study index 2a872480b..dd8b44c42 100644 --- a/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthSAContextSensitivity_0.case_study +++ b/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthSAContextSensitivity_0.case_study @@ -14,6 +14,7 @@ stages: version: 0 ... --- +config_type: PlainCommandlineConfiguration 0: '["--compress", "--mem", "10", "8"]' 1: '["--compress", "--mem", "300", "8"]' ... diff --git a/tests/TEST_INPUTS/paper_configs/test_config_ids/xz_0.case_study b/tests/TEST_INPUTS/paper_configs/test_config_ids/xz_0.case_study index e1101d9be..a544fbfbc 100644 --- a/tests/TEST_INPUTS/paper_configs/test_config_ids/xz_0.case_study +++ b/tests/TEST_INPUTS/paper_configs/test_config_ids/xz_0.case_study @@ -10,5 +10,6 @@ stages: config_ids: [1] version: 0 --- +config_type: PlainCommandlineConfiguration 0: '["--foo", "--bar"]' 1: '["--foo"]' \ No newline at end of file diff --git a/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/exclude-revision-range.info b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/exclude-revision-range.info new file mode 100644 index 000000000..66b6e46fc --- /dev/null +++ b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/exclude-revision-range.info @@ -0,0 +1,8 @@ +description: Patch that excludes a range of commits (Otherwise includes all) +exclude_revisions: + revision_range: + end: 8ca5cc28e6746eef7340064b5d843631841bf31e + start: 01f9f1f07bef22d4248e8349aba4f0c1f204607e +path: bug.patch +project_name: FeaturePerfCSCollection +shortname: exclude-revision-range diff --git 
a/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/exclude-single-and-revision-range.info b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/exclude-single-and-revision-range.info new file mode 100644 index 000000000..44a149e99 --- /dev/null +++ b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/exclude-single-and-revision-range.info @@ -0,0 +1,10 @@ +description: Patch that excludes a certain range and individual commits (Otherwise + includes all) +exclude_revisions: + revision_range: + end: 8ca5cc28e6746eef7340064b5d843631841bf31e + start: 01f9f1f07bef22d4248e8349aba4f0c1f204607e + single_revision: 27f17080376e409860405c40744887d81d6b3f34 +path: bug.patch +project_name: FeaturePerfCSCollection +shortname: exclude-single-and-revision-range diff --git a/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/exclude-single-revision.info b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/exclude-single-revision.info new file mode 100644 index 000000000..f15dd79aa --- /dev/null +++ b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/exclude-single-revision.info @@ -0,0 +1,6 @@ +description: Patch that is valid for all commits except a single one +exclude_revisions: + single_revision: 8ca5cc28e6746eef7340064b5d843631841bf31e +path: bug.patch +project_name: FeaturePerfCSCollection +shortname: exclude-single-revision diff --git a/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-range-exclude-range.info b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-range-exclude-range.info new file mode 100644 index 000000000..f65d12fdd --- /dev/null +++ b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-range-exclude-range.info @@ -0,0 +1,12 @@ +description: Patch valid for a range of commits where a subrange is explicitly excluded +exclude_revisions: + revision_range: + end: a94fb35ca49719028a1c50bdbc2fb82122043f46 + start: c051e44a973ee31b3baa571407694467a513ba68 +include_revisions: + revision_range: + 
end: 4300ea495e7f013f68e785fdde5c4ead81297999 + start: 01f9f1f07bef22d4248e8349aba4f0c1f204607e +path: bug.patch +project_name: FeaturePerfCSCollection +shortname: include-range-exclude-range diff --git a/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-range-exclude-single.info b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-range-exclude-single.info new file mode 100644 index 000000000..dae52ba93 --- /dev/null +++ b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-range-exclude-single.info @@ -0,0 +1,10 @@ +description: Patch valid for a range of commits where a single one is excluded +exclude_revisions: + single_revision: 162db88346b06be20faac6976f1ff9bad986accf +include_revisions: + revision_range: + end: 8ca5cc28e6746eef7340064b5d843631841bf31e + start: 01f9f1f07bef22d4248e8349aba4f0c1f204607e +path: bug.patch +project_name: FeaturePerfCSCollection +shortname: include-range-exclude-single diff --git a/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-revision-range.info b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-revision-range.info new file mode 100644 index 000000000..94d9e4f62 --- /dev/null +++ b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-revision-range.info @@ -0,0 +1,8 @@ +description: Patch that is valid for a range of commits +include_revisions: + revision_range: + end: 8ca5cc28e6746eef7340064b5d843631841bf31e + start: 01f9f1f07bef22d4248e8349aba4f0c1f204607e +path: bug.patch +project_name: FeaturePerfCSCollection +shortname: include-revision-range diff --git a/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-single-and-revision-range.info b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-single-and-revision-range.info new file mode 100644 index 000000000..85698d69b --- /dev/null +++ b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-single-and-revision-range.info @@ -0,0 +1,9 @@ +description: Patch that 
is valid for a revision range AND another single commit +include_revisions: + revision_range: + end: 8ca5cc28e6746eef7340064b5d843631841bf31e + start: 01f9f1f07bef22d4248e8349aba4f0c1f204607e + single_revision: 27f17080376e409860405c40744887d81d6b3f34 +path: bug.patch +project_name: FeaturePerfCSCollection +shortname: include-single-and-revision-range diff --git a/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-single-revision.info b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-single-revision.info new file mode 100644 index 000000000..ed9c836f3 --- /dev/null +++ b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/include-single-revision.info @@ -0,0 +1,6 @@ +description: Patch that is valid for a single revision +include_revisions: + single_revision: 8ca5cc28e6746eef7340064b5d843631841bf31e +path: bug.patch +project_name: FeaturePerfCSCollection +shortname: include-single-revision diff --git a/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/unrestricted-range.info b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/unrestricted-range.info new file mode 100644 index 000000000..b2e96bf38 --- /dev/null +++ b/tests/TEST_INPUTS/patch_configs/FeaturePerfCSCollection/unrestricted-range.info @@ -0,0 +1,5 @@ +description: Patch describing an unrestricted range of commits, meaning it should + include all commits +path: bug.patch +project_name: FeaturePerfCSCollection +shortname: unrestricted-range diff --git a/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/8380144f-9a25-44c6-8ce0-08d0a29c677b_config-1_success.zip b/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/8380144f-9a25-44c6-8ce0-08d0a29c677b_config-1_success.zip new file mode 100644 index 000000000..df6194dc1 Binary files /dev/null and 
b/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/8380144f-9a25-44c6-8ce0-08d0a29c677b_config-1_success.zip differ diff --git a/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/b24ee2c1-fc85-47ba-abbd-90c98e88a37c_config-0_success.zip b/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/b24ee2c1-fc85-47ba-abbd-90c98e88a37c_config-0_success.zip new file mode 100644 index 000000000..02e155887 Binary files /dev/null and b/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/b24ee2c1-fc85-47ba-abbd-90c98e88a37c_config-0_success.zip differ diff --git a/tests/data/test_report.py b/tests/data/test_report.py index fdb8d1196..f9e5cf9ab 100644 --- a/tests/data/test_report.py +++ b/tests/data/test_report.py @@ -156,6 +156,14 @@ def test_get_uuid(self): self.assertEqual(self.report_filename.uuid, self.correct_UUID) self.assertRaises(ValueError, lambda: self.broken_report_filename.uuid) + def test_experiment_shorthand_parsing_with_path_in_name(self) -> None: + """Checks that we correctly parse the experiment shorthand also in cases + where we have a path as part of the filename.""" + prefixed = ReportFilename( + "/tmp/foobar/" + self.report_filename.filename + ) + self.assertEqual(prefixed.experiment_shorthand, "CRE") + class TestConfigReportFilename(unittest.TestCase): """Test configuration specific ReportFilename functionality.""" diff --git a/tests/experiment/test_workload_util.py b/tests/experiment/test_workload_util.py index 729dedc24..1486c9aa5 100644 --- a/tests/experiment/test_workload_util.py +++ b/tests/experiment/test_workload_util.py @@ -6,8 +6,15 @@ from benchbuild.source.base import Revision, Variant import varats.experiment.workload_util as wu +from tests.helper_utils import run_in_test_environment, UnitTestFixtures 
+from varats.paper.paper_config import load_paper_config from varats.projects.c_projects.xz import Xz +from varats.projects.perf_tests.feature_perf_cs_collection import ( + SynthIPTemplate, + SynthIPRuntime, +) from varats.utils.git_util import ShortCommitHash +from varats.utils.settings import vara_cfg TT = PathToken.make_token(RootRenderer()) @@ -52,6 +59,73 @@ def test_workload_commands_tags_selected(self) -> None: ) self.assertEqual(len(commands), 1) + def test_workload_commands_requires(self) -> None: + revision = Revision(Xz, Variant(Xz.SOURCE[0], "c5c7ceb08a")) + project = Xz(revision=revision) + binary = Xz.binaries_for_revision(ShortCommitHash("c5c7ceb08a"))[0] + + commands = wu.workload_commands( + project, binary, [wu.WorkloadCategory.EXAMPLE] + ) + self.assertEqual(len(commands), 1) + commands = wu.workload_commands( + project, binary, [wu.WorkloadCategory.MEDIUM] + ) + self.assertEqual(len(commands), 1) + + @run_in_test_environment(UnitTestFixtures.PAPER_CONFIGS) + def test_workload_config_param_token(self) -> None: + vara_cfg()['paper_config']['current_config'] = "test_config_ids" + load_paper_config() + + revision = Revision( + SynthIPRuntime, Variant(SynthIPRuntime.SOURCE[0], "7930350628"), + Variant(SynthIPRuntime.SOURCE[1], "1") + ) + project = SynthIPRuntime(revision=revision) + binary = SynthIPRuntime.binaries_for_revision( + ShortCommitHash("7930350628") + )[0] + + commands = wu.workload_commands( + project, binary, [wu.WorkloadCategory.SMALL] + ) + self.assertEqual(len(commands), 1) + command = commands[0] + args = command.command.rendered_args(project=project) + self.assertEquals(args, tuple(["-c"])) + + @run_in_test_environment(UnitTestFixtures.PAPER_CONFIGS) + def test_workload_commands_requires_patch(self) -> None: + vara_cfg()['paper_config']['current_config'] = "test_config_ids" + load_paper_config() + + revision = Revision( + SynthIPTemplate, Variant(SynthIPTemplate.SOURCE[0], "7930350628"), + Variant(SynthIPTemplate.SOURCE[1], "1") + ) 
+ project = SynthIPTemplate(revision=revision) + binary = SynthIPTemplate.binaries_for_revision( + ShortCommitHash("7930350628") + )[0] + workloads = wu.workload_commands(project, binary, []) + self.assertEqual(2, len(workloads)) + + @run_in_test_environment(UnitTestFixtures.PAPER_CONFIGS) + def test_workload_commands_requires_patch2(self) -> None: + vara_cfg()['paper_config']['current_config'] = "test_config_ids" + load_paper_config() + + revision = Revision( + SynthIPTemplate, Variant(SynthIPTemplate.SOURCE[0], "7930350628"), + Variant(SynthIPTemplate.SOURCE[1], "0") + ) + project = SynthIPTemplate(revision=revision) + binary = SynthIPTemplate \ + .binaries_for_revision(ShortCommitHash("7930350628"))[0] + workloads = wu.workload_commands(project, binary, []) + self.assertEqual(0, len(workloads)) + class TestWorkloadFilenames(unittest.TestCase): diff --git a/tests/paper/test_case_study.py b/tests/paper/test_case_study.py index 6d8412ee9..6a18fbbf3 100644 --- a/tests/paper/test_case_study.py +++ b/tests/paper/test_case_study.py @@ -48,6 +48,7 @@ commit_id: 494 ... 
--- +config_type: ConfigurationImpl 0: '{"foo": true, "bar": false, "bazz": "bazz-value", "buzz": "None"}' 1: '{}' 2: '{}' diff --git a/tests/paper_mgmt/test_case_study.py b/tests/paper_mgmt/test_case_study.py index 01e22ab56..7a70b3325 100644 --- a/tests/paper_mgmt/test_case_study.py +++ b/tests/paper_mgmt/test_case_study.py @@ -239,7 +239,8 @@ def test_get_newest_result_files_for_case_study_with_empty_res_dir( UnitTestFixtures.PAPER_CONFIGS, UnitTestFixtures.RESULT_FILES ) def test_get_newest_result_files_for_case_study_with_config(self) -> None: - """Check that when we have two files, the newes one get's selected.""" + """Check that when we have two files that differ in their config id, + both get selected.""" vara_cfg()['paper_config']['current_config'] = "test_config_ids" load_paper_config() @@ -273,7 +274,56 @@ def test_get_newest_result_files_for_case_study_with_config(self) -> None: self.assertEqual(newest_res_filenames[0].config_id, 0) self.assertEqual(newest_res_filenames[1].config_id, 1) - self.assertEqual(len(newest_res_filenames), 2) + self.assertEqual(newest_res_filenames[2].config_id, 0) + self.assertEqual(newest_res_filenames[3].config_id, 1) + self.assertEqual(len(newest_res_filenames), 4) + + @run_in_test_environment( + UnitTestFixtures.PAPER_CONFIGS, UnitTestFixtures.RESULT_FILES + ) + def test_get_newest_result_files_for_case_study_with_diff_exp(self) -> None: + """Check that when we have two files that differ in their experiment + shorthand, both get selected.""" + vara_cfg()['paper_config']['current_config'] = "test_config_ids" + load_paper_config() + + config_0_file = ReportFilename( + "BBBase-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/" + "b24ee2c1-fc85-47ba-abbd-90c98e88a37c_config-0_success.zip" + ) + config_1_file = ReportFilename( + "BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/" + "b24ee2c1-fc85-47ba-abbd-90c98e88a37c_config-0_success.zip" + ) + + now = datetime.now().timestamp() + file_path_0 = Path( + 
str(vara_cfg()['result_dir']) + ) / 'SynthSAContextSensitivity' / config_0_file.filename + os.utime(file_path_0, (now, now)) + + file_path_1 = Path( + str(vara_cfg()['result_dir']) + ) / 'SynthSAContextSensitivity' / config_1_file.filename + os.utime(file_path_1, (now, now)) + + newest_res_files = MCS.get_newest_result_files_for_case_study( + get_paper_config().get_case_studies('SynthSAContextSensitivity')[0], + Path(vara_cfg()['result_dir'].value), CR + ) + + newest_res_files.sort(reverse=True) + newest_res_filenames = [ReportFilename(x) for x in newest_res_files] + + self.assertEqual( + newest_res_filenames[0].experiment_shorthand, "BBBaseO" + ) + self.assertEqual( + newest_res_filenames[1].experiment_shorthand, "BBBaseO" + ) + self.assertEqual(newest_res_filenames[2].experiment_shorthand, "BBBase") + self.assertEqual(newest_res_filenames[3].experiment_shorthand, "BBBase") + self.assertEqual(len(newest_res_filenames), 4) def test_get_case_study_file_name_filter_empty(self) -> None: """Check that we correctly handle case study filter generation even if diff --git a/tests/provider/test_patch_provider.py b/tests/provider/test_patch_provider.py new file mode 100644 index 000000000..9b55604e7 --- /dev/null +++ b/tests/provider/test_patch_provider.py @@ -0,0 +1,429 @@ +import unittest +from copy import deepcopy +from pathlib import Path + +import benchbuild as bb +from benchbuild.source.base import target_prefix +from benchbuild.utils.revision_ranges import _get_git_for_path + +from tests.helper_utils import TEST_INPUTS_DIR +from varats.projects.perf_tests.feature_perf_cs_collection import ( + FeaturePerfCSCollection, +) +from varats.provider.patch.patch_provider import PatchProvider, Patch, PatchSet +from varats.utils.git_util import ShortCommitHash + + +class TestPatchProvider(unittest.TestCase): + + def test_correct_patch_config_access(self): + """Checks if we get a correct path for accessing the PatchConfig.""" + provider = 
PatchProvider.create_provider_for_project( + FeaturePerfCSCollection + ) + self.assertIsNotNone(provider) + + def test_get_patch_by_shortname(self): + provider = PatchProvider.create_provider_for_project( + FeaturePerfCSCollection + ) + self.assertIsNotNone(provider) + + patch = provider.get_by_shortname("compile-error") + self.assertIsNotNone(patch) + + patch = provider.get_by_shortname("dummy-patch") + self.assertIsNone(patch) + + +class TestPatchRevisionRanges(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.patch_base_path = Path( + TEST_INPUTS_DIR / 'patch_configs/FeaturePerfCSCollection/' + ) + + project_git_source = bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="FeaturePerfCSCollection", + refspec="origin/HEAD", + shallow=False, + ) + + project_git_source.fetch() + + repo_git = _get_git_for_path( + target_prefix() + "/FeaturePerfCSCollection" + ) + + cls.all_revisions = { + ShortCommitHash(h) for h in + repo_git('log', '--pretty=%H', '--first-parent').strip().split() + } + + def __test_patch_revisions( + self, shortname: str, expected_revisions: set[ShortCommitHash] + ): + patch = Patch.from_yaml(self.patch_base_path / f"{shortname}.info") + + self.assertSetEqual(expected_revisions, patch.valid_revisions) + + def test_unrestricted_range(self): + self.__test_patch_revisions("unrestricted-range", self.all_revisions) + + def test_include_single_revision(self): + expected_revisions = { + ShortCommitHash("8ca5cc28e6746eef7340064b5d843631841bf31e") + } + + self.__test_patch_revisions( + "include-single-revision", expected_revisions + ) + + def test_include_revision_range(self): + expected_revisions = { + ShortCommitHash("01f9f1f07bef22d4248e8349aba4f0c1f204607e"), + ShortCommitHash("8ca5cc28e6746eef7340064b5d843631841bf31e"), + ShortCommitHash("c051e44a973ee31b3baa571407694467a513ba68"), + ShortCommitHash("162db88346b06be20faac6976f1ff9bad986accf"), + 
ShortCommitHash("745424e3ae1d521ae42e7486df126075d9c37be9") + } + + self.__test_patch_revisions( + "include-revision-range", expected_revisions + ) + + def test_included_single_and_revision_range(self): + expected_revisions = { + ShortCommitHash("01f9f1f07bef22d4248e8349aba4f0c1f204607e"), + ShortCommitHash("8ca5cc28e6746eef7340064b5d843631841bf31e"), + ShortCommitHash("c051e44a973ee31b3baa571407694467a513ba68"), + ShortCommitHash("162db88346b06be20faac6976f1ff9bad986accf"), + ShortCommitHash("745424e3ae1d521ae42e7486df126075d9c37be9"), + ShortCommitHash("27f17080376e409860405c40744887d81d6b3f34") + } + + self.__test_patch_revisions( + "include-single-and-revision-range", expected_revisions + ) + + def test_exclude_single_revision(self): + expected_revisions = deepcopy(self.all_revisions) + expected_revisions.remove( + ShortCommitHash("8ca5cc28e6746eef7340064b5d843631841bf31e") + ) + + self.__test_patch_revisions( + "exclude-single-revision", expected_revisions + ) + + def test_exclude_revision_range(self): + expected_revisions = deepcopy(self.all_revisions) + expected_revisions.difference_update({ + ShortCommitHash("01f9f1f07bef22d4248e8349aba4f0c1f204607e"), + ShortCommitHash("8ca5cc28e6746eef7340064b5d843631841bf31e"), + ShortCommitHash("c051e44a973ee31b3baa571407694467a513ba68"), + ShortCommitHash("162db88346b06be20faac6976f1ff9bad986accf"), + ShortCommitHash("745424e3ae1d521ae42e7486df126075d9c37be9") + }) + + self.__test_patch_revisions( + "exclude-revision-range", expected_revisions + ) + + def test_exclude_single_and_revision_range(self): + expected_revisions = deepcopy(self.all_revisions) + expected_revisions.difference_update({ + ShortCommitHash("01f9f1f07bef22d4248e8349aba4f0c1f204607e"), + ShortCommitHash("8ca5cc28e6746eef7340064b5d843631841bf31e"), + ShortCommitHash("c051e44a973ee31b3baa571407694467a513ba68"), + ShortCommitHash("162db88346b06be20faac6976f1ff9bad986accf"), + ShortCommitHash("745424e3ae1d521ae42e7486df126075d9c37be9"), + 
ShortCommitHash("27f17080376e409860405c40744887d81d6b3f34") + }) + + self.__test_patch_revisions( + "exclude-single-and-revision-range", expected_revisions + ) + + def test_include_range_exclude_single(self): + expected_revisions = { + ShortCommitHash("01f9f1f07bef22d4248e8349aba4f0c1f204607e"), + ShortCommitHash("8ca5cc28e6746eef7340064b5d843631841bf31e"), + ShortCommitHash("c051e44a973ee31b3baa571407694467a513ba68"), + ShortCommitHash("745424e3ae1d521ae42e7486df126075d9c37be9") + } + + self.__test_patch_revisions( + "include-range-exclude-single", expected_revisions + ) + + def test_include_range_exclude_range(self): + expected_revisions = { + ShortCommitHash("01f9f1f07bef22d4248e8349aba4f0c1f204607e"), + ShortCommitHash("4300ea495e7f013f68e785fdde5c4ead81297999"), + ShortCommitHash("27f17080376e409860405c40744887d81d6b3f34"), + ShortCommitHash("32b28ee90e2475cf44d7a616101bcaba2396168d"), + ShortCommitHash("162db88346b06be20faac6976f1ff9bad986accf"), + ShortCommitHash("745424e3ae1d521ae42e7486df126075d9c37be9") + } + + self.__test_patch_revisions( + "include-range-exclude-range", expected_revisions + ) + + +class TestPatchSet(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + patches = { + Patch( + "TEST", + "Test-ABCD", + "", + path=Path("test.patch"), + tags={"A", "B", "C", "D"}, + feature_tags={"F_A", "F_B", "F_C", "F_D"} + ), + Patch( + "TEST", + "Test-A", + "", + path=Path("test.patch"), + tags={"A"}, + feature_tags={"F_A"} + ), + Patch( + "TEST", + "Test-B", + "", + path=Path("test.patch"), + tags={"B"}, + feature_tags={"F_B"} + ), + Patch( + "TEST", + "Test-C", + "", + path=Path("test.patch"), + tags={"C"}, + feature_tags={"F_C"} + ), + Patch( + "TEST", + "Test-D", + "", + path=Path("test.patch"), + tags={"D"}, + feature_tags={"F_D"} + ), + Patch( + "TEST", + "Test-AB", + "", + path=Path("test.patch"), + tags={"A", "B"}, + feature_tags={"F_A", "F_B"} + ), + Patch( + "TEST", + "Test-AC", + "", + path=Path("test.patch"), + tags={"A", "C"}, 
+ feature_tags={"F_A", "F_C"} + ), + Patch( + "TEST", + "Test-AD", + "", + path=Path("test.patch"), + tags={"A", "D"}, + feature_tags={"F_A", "F_D"} + ), + Patch( + "TEST", + "Test-BC", + "", + path=Path("test.patch"), + tags={"B", "C"}, + feature_tags={"F_B", "F_C"} + ), + Patch( + "TEST", + "Test-BD", + "", + path=Path("test.patch"), + tags={"B", "D"}, + feature_tags={"F_B", "F_D"} + ), + Patch( + "TEST", + "Test-CD", + "", + path=Path("test.patch"), + tags={"C", "D"}, + feature_tags={"F_C", "F_D"} + ), + Patch( + "TEST", + "Test-ABC", + "", + path=Path("test.patch"), + tags={"A", "B", "C"}, + feature_tags={"F_A", "F_B", "F_C"} + ), + Patch( + "TEST", + "Test-ABD", + "", + path=Path("test.patch"), + tags={"A", "B", "D"}, + feature_tags={"F_A", "F_B", "F_D"} + ), + Patch( + "TEST", + "Test-ACD", + "", + path=Path("test.patch"), + tags={"A", "C", "D"}, + feature_tags={"F_A", "F_C", "F_D"} + ), + Patch( + "TEST", + "Test-BCD", + "", + path=Path("test.patch"), + tags={"B", "C", "D"}, + feature_tags={"F_B", "F_C", "F_D"} + ), + } + + cls.patchSet = PatchSet(patches) + + def test_bracket_single_tag(self): + for tag in {"A", "B", "C", "D"}: + patches = self.patchSet[tag] + self.assertEqual(8, len(patches)) + + for patch in patches: + self.assertIn(tag, patch.shortname) + + def test_bracket_multiple_tags(self): + tags_count = {("A", "B"): 4, + ("C", "B"): 4, + ("D", "B"): 4, + ("A", "B", "C"): 2, + ("A", "B", "C", "D"): 1} + + for tags in tags_count: + patches = self.patchSet[tags] + + self.assertEqual(tags_count[tags], len(patches)) + + for patch in patches: + for tag in tags: + self.assertIn(tag, patch.tags) + + def test_all_of_single_tag(self): + for tag in {"A", "B", "C", "D"}: + patches = self.patchSet.all_of(tag) + self.assertEqual(8, len(patches)) + + for patch in patches: + self.assertIn(tag, patch.shortname) + + def test_all_of_multiple_tags(self): + tags_count = {("A", "B"): 4, + ("C", "B"): 4, + ("D", "B"): 4, + ("A", "B", "C"): 2, + ("A", "B", "C", "D"): 1} + 
+ for tags in tags_count: + patches = self.patchSet.all_of(tags) + + self.assertEqual(tags_count[tags], len(patches)) + + for patch in patches: + for tag in tags: + self.assertIn(tag, patch.tags) + + def test_any_of_single_tag(self): + for tag in {"A", "B", "C", "D"}: + patches = self.patchSet.any_of(tag) + self.assertEqual(8, len(patches)) + + for patch in patches: + self.assertIn(tag, patch.shortname) + + def test_any_of_multiple_tags(self): + tags_count = {("A", "B"): 12, + ("C", "B"): 12, + ("D", "B"): 12, + ("A", "B", "C"): 14, + ("A", "B", "C", "D"): 15} + + for tags in tags_count: + patches = self.patchSet.any_of(tags) + + self.assertEqual(tags_count[tags], len(patches)) + + for patch in patches: + any([tag in patch.tags for tag in tags]) + + def test_all_of_single_feature_tag(self): + for tag in {"F_A", "F_B", "F_C", "F_D"}: + patches = self.patchSet.all_of_features([tag]) + self.assertEqual(8, len(patches)) + + def test_all_of_multiple_feature_tags(self): + tags_count = {("F_A", "F_B"): 4, + ("F_C", "F_B"): 4, + ("F_D", "F_B"): 4, + ("F_A", "F_B", "F_C"): 2, + ("F_A", "F_B", "F_C", "F_D"): 1} + + for tags in tags_count: + patches = self.patchSet.all_of_features(tags) + self.assertEqual(tags_count[tags], len(patches)) + + def test_any_of_single_feature_tag(self): + for tag in {"F_A", "F_B", "F_C", "F_D"}: + patches = self.patchSet.any_of_features([tag]) + self.assertEqual(8, len(patches)) + + def test_any_of_multiple_feature_tags(self): + tags_count = {("F_A", "F_B"): 12, + ("F_C", "F_B"): 12, + ("F_D", "F_B"): 12, + ("F_A", "F_B", "F_C"): 14, + ("F_A", "F_B", "F_C", "F_D"): 15} + + for tags in tags_count: + patches = self.patchSet.any_of_features(tags) + self.assertEqual(tags_count[tags], len(patches)) + + def test_patchset_intersection(self): + patches = self.patchSet["A"] & self.patchSet["B"] + + self.assertEqual(4, len(patches)) + + patches = patches & self.patchSet["C"] + self.assertEqual(2, len(patches)) + + patches = patches & self.patchSet["D"] + 
self.assertEqual(1, len(patches)) + + def test_patchset_union(self): + patches = self.patchSet["A"] | self.patchSet["B"] + + self.assertEqual(12, len(patches)) + + patches = patches | self.patchSet["C"] + self.assertEqual(14, len(patches)) + + patches = patches | self.patchSet["D"] + self.assertEqual(15, len(patches)) diff --git a/tests/report/test_gnu_time_report.py b/tests/report/test_gnu_time_report.py index 80a74b550..d836c1929 100644 --- a/tests/report/test_gnu_time_report.py +++ b/tests/report/test_gnu_time_report.py @@ -18,13 +18,13 @@ Average total size (kbytes): 0 Maximum resident set size (kbytes): 1804 Average resident set size (kbytes): 0 - Major (requiring I/O) page faults: 0 + Major (requiring I/O) page faults: 2 Minor (reclaiming a frame) page faults: 142 Voluntary context switches: 1 Involuntary context switches: 1 Swaps: 0 - File system inputs: 0 - File system outputs: 0 + File system inputs: 1 + File system outputs: 2 Socket messages sent: 0 Socket messages received: 0 Signals delivered: 0 @@ -63,6 +63,12 @@ def test_max_resident_size(self): with self.assertRaises(WrongTimeReportFormat): TimeReport._parse_max_resident_size(" Something other timed:") + def test_major_page_faults(self): + """Test if we correctly parse the amount of major page faults from the + input line.""" + with self.assertRaises(WrongTimeReportFormat): + TimeReport._parse_major_page_faults(" Something other timed:") + def test_max_resident_size_byte_type(self): """Test if we correctly parse the max resident size from the input line.""" @@ -97,6 +103,30 @@ def test_system_time(self): """Test if we can extract the system time from the parsed file.""" self.assertEqual(self.report.system_time, timedelta(seconds=3)) + def test_wall_clock_time(self): + """Test if we can extract the wall clock time from the parsed file.""" + self.assertEqual(self.report.wall_clock_time, timedelta(seconds=42)) + + def test_max_resident_size(self) -> None: + """Test if we can extract the max resident 
size from the parsed file.""" + self.assertEqual(self.report.max_res_size, 1804) + + def test_major_page_faults(self) -> None: + """Test if we can extract the number of major page faults from the + parsed file.""" + self.assertEqual(self.report.major_page_faults, 2) + + def test_minor_page_faults(self) -> None: + """Test if we can extract the number of minor page faults from the + parsed file.""" + self.assertEqual(self.report.minor_page_faults, 142) + + def test_filesystem_io(self) -> None: + """Test if we can extract the number of filesystem inputs/outputs from + the parsed file.""" + self.assertEqual(self.report.filesystem_io[0], 1) + self.assertEqual(self.report.filesystem_io[1], 2) + def test_repr_str(self): """Test string representation of TimeReports.""" expected_result = """Command: echo diff --git a/tests/report/test_linux_perf_report.py b/tests/report/test_linux_perf_report.py new file mode 100644 index 000000000..19f3d0781 --- /dev/null +++ b/tests/report/test_linux_perf_report.py @@ -0,0 +1,102 @@ +"""Test LinuxPerfReport.""" + +import unittest +from pathlib import Path +from unittest import mock + +from varats.report.linux_perf_report import LinuxPerfReport + +PERF_REPORT_1 = """# started on Sun Jul 23 22:51:54 2023 + + + Performance counter stats for 'echo foo:bar': + + 0.30 msec task-clock:u # 0.406 CPUs utilized + 0 context-switches:u # 0.000 /sec + 0 cpu-migrations:u # 0.000 /sec + 64 page-faults:u # 212.723 K/sec + 360,721 cycles:u # 1.199 GHz + 26,199 stalled-cycles-frontend:u # 7.26% frontend cycles idle + 111,008 stalled-cycles-backend:u # 30.77% backend cycles idle + 200,655 instructions:u # 0.56 insn per cycle + # 0.55 stalled cycles per insn + 48,631 branches:u # 161.639 M/sec + 3,012 branch-misses:u # 6.19% of all branches + L1-dcache-loads:u (0.00%) + L1-dcache-load-misses:u (0.00%) + LLC-loads:u + LLC-load-misses:u + + 0.000741511 seconds time elapsed + + 0.000000000 seconds user + 0.000822000 seconds sys + + + +""" + +PERF_REPORT_2 = 
"""# started on Sun Jul 23 22:44:31 2023 + + + Performance counter stats for 'foobar': + + 1.23 msec task-clock:u # 0.000 CPUs utilized + 0 context-switches:u # 0.000 /sec + 0 cpu-migrations:u # 0.000 /sec + 132 page-faults:u # 107.572 K/sec + 850,975 cycles:u # 0.693 GHz (12.81%) + 140,154 stalled-cycles-frontend:u # 16.47% frontend cycles idle + 1,012,322 stalled-cycles-backend:u # 118.96% backend cycles idle + 1,785,912 instructions:u # 2.10 insn per cycle + # 0.57 stalled cycles per insn + 325,708 branches:u # 265.433 M/sec + 11,160 branch-misses:u # 3.43% of all branches + 840,918 L1-dcache-loads:u # 685.298 M/sec (87.19%) + L1-dcache-load-misses:u (0.00%) + LLC-loads:u + LLC-load-misses:u + + 5.945920439 seconds time elapsed + + 0.000376000 seconds user + 0.001390000 seconds sys + + + +""" + + +class TestLinuxPerfReport(unittest.TestCase): + """Tests if the Linux perf report can be loaded correctly.""" + + report_1: LinuxPerfReport + report_2: LinuxPerfReport + + @classmethod + def setUpClass(cls) -> None: + """Load Linux perf report.""" + with mock.patch( + "builtins.open", new=mock.mock_open(read_data=PERF_REPORT_1) + ): + cls.report_1 = LinuxPerfReport(Path("fake_file_path")) + + with mock.patch( + "builtins.open", new=mock.mock_open(read_data=PERF_REPORT_2) + ): + cls.report_2 = LinuxPerfReport(Path("fake_file_path")) + + def test_task_clock_parsing(self) -> None: + """Checks if we correctly parsed the value for task clock.""" + self.assertEqual(self.report_1.elapsed_time, 0.000741511) + self.assertEqual(self.report_2.elapsed_time, 5.945920439) + + def test_context_switches_parsing(self) -> None: + """Checks if we correctly parsed the value for context switches.""" + self.assertEqual(self.report_1.ctx_switches, 0) + self.assertEqual(self.report_2.ctx_switches, 0) + + def test_branch_misses_parsing(self) -> None: + """Checks if we correctly parsed the value for branch misses.""" + self.assertEqual(self.report_1.branch_misses, 3012) + 
self.assertEqual(self.report_2.branch_misses, 11160) diff --git a/tests/report/test_multi_patch_report.py b/tests/report/test_multi_patch_report.py new file mode 100644 index 000000000..e192e1a79 --- /dev/null +++ b/tests/report/test_multi_patch_report.py @@ -0,0 +1,76 @@ +"""Test MultiPatchReport.""" + +import unittest +from pathlib import Path + +from varats.provider.patch.patch_provider import Patch +from varats.report.multi_patch_report import MultiPatchReport + + +class TestMultiPatchReport(unittest.TestCase): + """Tests if the basic components of MultiPatchReport are working.""" + + def test_baseline_report_name(self) -> None: + """Tests if baseline report names are correctly created and checked.""" + baseline_report_name = MultiPatchReport.create_baseline_report_name( + "my_base.txt" + ) + + self.assertEqual(baseline_report_name, "baseline_my_base.txt") + self.assertTrue( + MultiPatchReport.is_baseline_report(baseline_report_name) + ) + + self.assertFalse( + MultiPatchReport.is_baseline_report(baseline_report_name[1:]) + ) + + def test_patched_report_name(self) -> None: + """Tests if patched report names are correctly created and checked.""" + patch_shortname = "shortname" + patch = Patch("MyPatch", patch_shortname, "desc", Path()) + patched_report_name = MultiPatchReport.create_patched_report_name( + patch, "my_base.txt" + ) + + self.assertEqual( + patched_report_name, + f"patched_{len(patch_shortname)}_{patch_shortname}_my_base.txt" + ) + self.assertTrue(MultiPatchReport.is_patched_report(patched_report_name)) + self.assertFalse( + MultiPatchReport.is_baseline_report(patched_report_name) + ) + + self.assertFalse( + MultiPatchReport.is_baseline_report(patched_report_name[1:]) + ) + + def test_patched_report_parsing(self) -> None: + """Test if we can correctly parse patch shortnames.""" + patch_shortname = "shortname" + patch = Patch("MyPatch", patch_shortname, "desc", Path()) + patched_report_name = MultiPatchReport.create_patched_report_name( + patch, 
"my_base.txt" + ) + + self.assertEqual( + MultiPatchReport. + _parse_patch_shorthand_from_report_name(patched_report_name), + patch_shortname + ) + + def test_patched_report_parsing_with_extra_underscores(self) -> None: + """Test special parsing case where the patch shortname contains + underscores.""" + patch_shortname = "sh_ort_name" + patch = Patch("MyPatch", patch_shortname, "desc", Path()) + patched_report_name = MultiPatchReport.create_patched_report_name( + patch, "my_base.txt" + ) + + self.assertEqual( + MultiPatchReport. + _parse_patch_shorthand_from_report_name(patched_report_name), + patch_shortname + ) diff --git a/tests/test_containers.py b/tests/test_containers.py index 5125071b7..fa48a6e9b 100644 --- a/tests/test_containers.py +++ b/tests/test_containers.py @@ -32,6 +32,10 @@ class TestImageBase(unittest.TestCase): def test_distro(self) -> None: self.assertEqual(Distro.DEBIAN, ImageBase.DEBIAN_10.distro) + def test_distro_version_number(self) -> None: + self.assertEqual(10, ImageBase.DEBIAN_10.version) + self.assertEqual(12, ImageBase.DEBIAN_12.version) + class TestContainerSupport(unittest.TestCase): """Test container support related functionality.""" @@ -96,7 +100,7 @@ def test_create_stage_10_from_pip(self) -> None: self.check_layer_type(layers[0], FromLayer) varats_core_install_layer = self.check_layer_type(layers[2], RunLayer) - self.assertEqual("pip3", varats_core_install_layer.command) + self.assertEqual("pip", varats_core_install_layer.command) self.assertTupleEqual( ("install", "--ignore-installed", "varats-core", "varats"), varats_core_install_layer.args @@ -123,7 +127,7 @@ def test_create_stage_10_from_source(self) -> None: self.check_layer_type(layers[0], FromLayer) varats_core_install_layer = self.check_layer_type(layers[4], RunLayer) - self.assertEqual("pip3", varats_core_install_layer.command) + self.assertEqual("pip", varats_core_install_layer.command) self.assertTupleEqual(("install", "/varats/varats-core"), 
varats_core_install_layer.args) mounting_parameters = "type=bind,src=varats_src,target=/varats" @@ -133,7 +137,7 @@ def test_create_stage_10_from_source(self) -> None: varats_core_install_layer.kwargs) varats_install_layer = self.check_layer_type(layers[5], RunLayer) - self.assertEqual("pip3", varats_install_layer.command) + self.assertEqual("pip", varats_install_layer.command) self.assertTupleEqual(("install", "/varats/varats"), varats_install_layer.args) mounting_parameters = "type=bind,src=varats_src,target=/varats" diff --git a/tests/utils/test_experiment_util.py b/tests/utils/test_experiment_util.py index 90061005b..b82ea7e03 100644 --- a/tests/utils/test_experiment_util.py +++ b/tests/utils/test_experiment_util.py @@ -24,8 +24,16 @@ from varats.project.project_util import BinaryType, ProjectBinaryWrapper from varats.project.varats_project import VProject from varats.projects.c_projects.xz import Xz +from varats.projects.perf_tests.feature_perf_cs_collection import ( + SynthIPTemplate, +) from varats.report.gnu_time_report import TimeReport from varats.report.report import FileStatusExtension, ReportSpecification +from varats.utils.config import ( + get_current_config_id, + get_extra_config_options, + get_config_patches, +) from varats.utils.git_util import ShortCommitHash from varats.utils.settings import vara_cfg, bb_cfg @@ -399,7 +407,7 @@ class TestConfigID(unittest.TestCase): def test_get_current_config_id_no_config(self) -> None: revision = Revision(Xz, Variant(Xz.SOURCE[0], "c5c7ceb08a")) project = Xz(revision=revision) - self.assertEqual(EU.get_current_config_id(project), None) + self.assertEqual(get_current_config_id(project), None) def test_get_current_config_id(self) -> None: revision = Revision( @@ -407,7 +415,7 @@ def test_get_current_config_id(self) -> None: Variant(Xz.SOURCE[1], "42") ) project = Xz(revision=revision) - self.assertEqual(EU.get_current_config_id(project), 42) + self.assertEqual(get_current_config_id(project), 42) 
@run_in_test_environment(UnitTestFixtures.PAPER_CONFIGS) def test_get_extra_config_options(self) -> None: @@ -418,4 +426,21 @@ def test_get_extra_config_options(self) -> None: Xz, Variant(Xz.SOURCE[0], "c5c7ceb08a"), Variant(Xz.SOURCE[1], "1") ) project = Xz(revision=revision) - self.assertEqual(EU.get_extra_config_options(project), ["--foo"]) + self.assertEqual(get_extra_config_options(project), ["--foo"]) + + @run_in_test_environment(UnitTestFixtures.PAPER_CONFIGS) + def test_get_config_patches(self) -> None: + vara_cfg()['paper_config']['current_config'] = "test_config_ids" + load_paper_config() + + revision = Revision( + SynthIPTemplate, Variant(SynthIPTemplate.SOURCE[0], "7930350628"), + Variant(SynthIPTemplate.SOURCE[1], "4") + ) + project = SynthIPTemplate(revision=revision) + patches = get_config_patches(project) + self.assertEqual(len(patches), 1) + self.assertEqual( + list(patches)[0].feature_tags, + ["Compress", "fastmode", "smallmode"] + ) diff --git a/tests/utils/test_git_util.py b/tests/utils/test_git_util.py index a2dc084ec..c11818665 100644 --- a/tests/utils/test_git_util.py +++ b/tests/utils/test_git_util.py @@ -2,7 +2,7 @@ import unittest from pathlib import Path -from benchbuild.utils.revision_ranges import RevisionRange +from benchbuild.utils.revision_ranges import RevisionRange, SingleRevision from varats.project.project_util import ( get_local_project_git, @@ -549,6 +549,36 @@ def test_specification_validity_range_binaries(self) -> None: self.assertIn("SingleLocalMultipleRegions", self.rv_map) + def test_specification_validity_range_multiple_binaries(self) -> None: + """Check if we can add binaries to the map that are only valid in a + specific range.""" + self.rv_map.specify_binary( + "build/bin/SingleLocalMultipleRegions", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("162db88346", "master") + ) + self.rv_map.specify_binary( + "build/bin/SingleLocalSimple", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("162db88346", 
"master") + ) + + self.assertEqual(len(self.rv_map[ShortCommitHash("162db88346")]), 2) + + self.assertIn("SingleLocalMultipleRegions", self.rv_map) + self.assertIn("SingleLocalSimple", self.rv_map) + + def test_specification_single_revision(self) -> None: + """Check if we can add binaries that are only valid with a single + revision.""" + self.rv_map.specify_binary( + "build/bin/SingleLocalMultipleRegions", + BinaryType.EXECUTABLE, + only_valid_in=SingleRevision("162db88346") + ) + + self.assertIn("SingleLocalMultipleRegions", self.rv_map) + def test_specification_binaries_with_special_name(self) -> None: """Check if we can add binaries that have a special name.""" self.rv_map.specify_binary( diff --git a/varats-core/setup.py b/varats-core/setup.py index 995999e9b..86750bfed 100644 --- a/varats-core/setup.py +++ b/varats-core/setup.py @@ -3,14 +3,14 @@ setup( name='varats-core', - version='13.0.4', + version='13.0.5', url='https://github.com/se-sic/vara-tool-suite', packages=find_namespace_packages(include=['varats.*']), namespace_packages=["varats"], setup_requires=["pytest-runner", "setuptools_scm"], tests_require=["pytest", "pytest-cov"], install_requires=[ - "benchbuild>=6.7", + "benchbuild>=6.8", "ijson>=3.1.4", "plumbum>=1.6", "PyGithub>=1.58", diff --git a/varats-core/varats/base/configuration.py b/varats-core/varats/base/configuration.py index 0c667dd2e..cdf6cb8e5 100644 --- a/varats-core/varats/base/configuration.py +++ b/varats-core/varats/base/configuration.py @@ -414,3 +414,49 @@ def get_config_value(self, option_name: str) -> tp.Optional[tp.Any]: def unfreeze(self) -> Configuration: return self + + +class PatchConfiguration(Configuration): + """Configuration class for projects where configuring is done by applying a + patch.""" + + def __init__(self, patch_feature_tags: tp.Set[str]): + self.__patch_feature_tags: tp.Set[ConfigurationOption] = { + ConfigurationOptionImpl(tag, tag) for tag in patch_feature_tags + } + + @staticmethod + def 
create_configuration_from_str(config_str: str) -> Configuration: + patch_feature_tags = json.loads(config_str) + return PatchConfiguration(patch_feature_tags) + + def add_config_option(self, option: ConfigurationOption) -> None: + self.__patch_feature_tags.add(option) + + def set_config_option(self, option_name: str, value: tp.Any) -> None: + self.__patch_feature_tags = { + option for option in self.__patch_feature_tags + if option.name != option_name + } + self.add_config_option(ConfigurationOptionImpl(option_name, value)) + + def get_config_value(self, option_name: str) -> tp.Optional[tp.Any]: + filtered_options = filter( + lambda option: (option.name == option_name), + self.__patch_feature_tags + ) + return any(filtered_options) + + def options(self) -> tp.List[ConfigurationOption]: + return list(self.__patch_feature_tags) + + def dump_to_string(self) -> str: + return ", ".join( + map(lambda option: str(option.value), self.__patch_feature_tags) + ) + + def freeze(self) -> FrozenConfiguration: + return FrozenConfiguration(deepcopy(self)) + + def unfreeze(self) -> Configuration: + return self diff --git a/varats-core/varats/experiment/experiment_util.py b/varats-core/varats/experiment/experiment_util.py index bbb6c8d9e..78e2aee8c 100644 --- a/varats-core/varats/experiment/experiment_util.py +++ b/varats-core/varats/experiment/experiment_util.py @@ -10,7 +10,6 @@ from collections import defaultdict from pathlib import Path from types import TracebackType -from typing import Protocol, runtime_checkable from benchbuild import source from benchbuild.experiment import Experiment @@ -23,10 +22,8 @@ from plumbum.commands.base import BoundCommand import varats.revision.revisions as revs -from varats.base.configuration import PlainCommandlineConfiguration -from varats.paper.paper_config import get_paper_config +from varats.experiment.steps.patch import ApplyPatch from varats.project.project_util import ProjectBinaryWrapper -from varats.project.sources import FeatureSource 
from varats.project.varats_project import VProject from varats.report.report import ( BaseReport, @@ -35,7 +32,7 @@ ReportSpecification, ReportFilename, ) -from varats.utils.config import load_configuration_map_for_case_study +from varats.utils.config import get_config_patches from varats.utils.git_util import ShortCommitHash from varats.utils.settings import vara_cfg, bb_cfg @@ -551,11 +548,17 @@ def __run_children(self, tmp_folder: Path) -> tp.List[StepResult]: def __call__(self) -> StepResult: results: tp.List[StepResult] = [] + exception_raised_during_exec = False with ZippedReportFolder(self.__output_filepath.full_path()) as tmp_dir: - results = self.__run_children(Path(tmp_dir)) + try: + results = self.__run_children(Path(tmp_dir)) + except: # noqa: E722 + exception_raised_during_exec = True + raise overall_step_result = max(results) if results else StepResult.OK - if overall_step_result is not StepResult.OK: + if overall_step_result is not StepResult.OK \ + or exception_raised_during_exec: error_filepath = self.__output_filepath.with_status( FileStatusExtension.FAILED ) @@ -670,60 +673,19 @@ def create_new_failed_result_filepath( ) -def get_current_config_id(project: VProject) -> tp.Optional[int]: +def get_config_patch_steps(project: VProject) -> tp.MutableSequence[Step]: """ - Get, if available, the current config id of project. Should the project be - not configuration specific ``None`` is returned. + Get a list of actions that apply all configuration patches to the project. 
Args: - project: to extract the config id from + project: the project to be configured Returns: - config_id if available for the given project + the actions that configure the project """ - if project.active_revision.has_variant(FeatureSource.LOCAL_KEY): - return int( - project.active_revision.variant_by_name(FeatureSource.LOCAL_KEY - ).version + return list( + map( + lambda patch: ApplyPatch(project, patch), + get_config_patches(project) ) - - return None - - -def get_extra_config_options(project: VProject) -> tp.List[str]: - """ - Get extra program options that were specified in the particular - configuration of \a Project. - - Args: - project: to get the extra options for - - Returns: - list of command line options as string - """ - config_id = get_current_config_id(project) - if config_id is None: - return [] - - paper_config = get_paper_config() - case_studies = paper_config.get_case_studies(cs_name=project.name) - - if len(case_studies) > 1: - raise AssertionError( - "Cannot handle multiple case studies of the same project." 
- ) - - case_study = case_studies[0] - - config_map = load_configuration_map_for_case_study( - paper_config, case_study, PlainCommandlineConfiguration ) - - config = config_map.get_configuration(config_id) - - if config is None: - raise AssertionError( - "Requested config id was not in the map, but should be" - ) - - return list(map(lambda option: option.value, config.options())) diff --git a/varats-core/varats/experiment/steps/patch.py b/varats-core/varats/experiment/steps/patch.py new file mode 100644 index 000000000..e03fd63a3 --- /dev/null +++ b/varats-core/varats/experiment/steps/patch.py @@ -0,0 +1,76 @@ +import textwrap +from pathlib import Path + +from benchbuild.utils import actions +from benchbuild.utils.actions import StepResult +from plumbum import ProcessExecutionError + +from varats.project.varats_project import VProject +from varats.provider.patch.patch_provider import Patch +from varats.utils.git_commands import apply_patch, revert_patch + + +class ApplyPatch(actions.ProjectStep): + """Apply a patch to a project.""" + + NAME = "APPLY_PATCH" + DESCRIPTION = "Apply a Git patch to a project." + + def __init__(self, project: VProject, patch: Patch) -> None: + super().__init__(project) + self.__patch = patch + + def __call__(self) -> StepResult: + try: + print( + f"Applying {self.__patch.shortname} to " + f"{self.project.source_of_primary}" + ) + apply_patch(Path(self.project.source_of_primary), self.__patch.path) + + except ProcessExecutionError: + self.status = StepResult.ERROR + + self.status = StepResult.OK + + return StepResult.OK + + def __str__(self, indent: int = 0) -> str: + return textwrap.indent( + f"* {self.project.name}: Apply patch " + f"{self.__patch.shortname}", " " * indent + ) + + +class RevertPatch(actions.ProjectStep): + """Revert a patch from a project.""" + + NAME = "REVERT_PATCH" + DESCRIPTION = "Revert a Git patch from a project." 
+ + def __init__(self, project, patch): + super().__init__(project) + self.__patch = patch + + def __call__(self) -> StepResult: + try: + print( + f"Reverting {self.__patch.shortname} on " + f"{self.project.source_of_primary}" + ) + revert_patch( + Path(self.project.source_of_primary), self.__patch.path + ) + + except ProcessExecutionError: + self.status = StepResult.ERROR + + self.status = StepResult.OK + + return StepResult.OK + + def __str__(self, indent: int = 0) -> str: + return textwrap.indent( + f"* {self.project.name}: Revert patch " + f"{self.__patch.shortname}", " " * indent + ) diff --git a/varats-core/varats/experiment/workload_util.py b/varats-core/varats/experiment/workload_util.py index 8cf66daff..3d4286414 100644 --- a/varats-core/varats/experiment/workload_util.py +++ b/varats-core/varats/experiment/workload_util.py @@ -11,6 +11,7 @@ from pathlib import Path from benchbuild.command import ( + ArgsToken, PathToken, ProjectCommand, unwrap, @@ -19,9 +20,12 @@ Command, ) +from varats.base.configuration import PlainCommandlineConfiguration from varats.project.project_util import ProjectBinaryWrapper +from varats.project.varats_command import VProjectCommand from varats.project.varats_project import VProject from varats.report.report import KeyedReportAggregate, ReportTy +from varats.utils.config import get_config, get_extra_config_options from varats.utils.exceptions import auto_unwrap @@ -66,6 +70,28 @@ def specify_binary(binary_name: str) -> PathToken: RSBinary = specify_binary +class ConfigurationParameterRenderer: + + def __init__(self, *default_args: str) -> None: + self.__default_args = default_args + + def unrendered(self) -> str: + return f"" + + def rendered(self, project: VProject, + **kwargs: tp.Any) -> tp.Tuple[str, ...]: + if get_config(project, PlainCommandlineConfiguration) is None: + return self.__default_args + return tuple(get_extra_config_options(project)) + + +def specify_configuration_parameters(*default_args: str) -> ArgsToken: + 
return ArgsToken.make_token(ConfigurationParameterRenderer(*default_args)) + + +ConfigParams = specify_configuration_parameters + + def workload_commands( project: VProject, binary: ProjectBinaryWrapper, requested_workload_tags: tp.List[WorkloadCategory] @@ -84,17 +110,18 @@ def workload_commands( if requested_workload_tags: run_only = WorkloadSet(*requested_workload_tags) - project_cmds: tp.List[ProjectCommand] = [ - ProjectCommand(project, workload_cmd) + project_cmds: tp.List[VProjectCommand] = [ + VProjectCommand(project, workload_cmd) for workload_cmd in itertools.chain( * filter_workload_index(run_only, unwrap(project.workloads, project)) ) ] - return list( - filter(lambda prj_cmd: prj_cmd.path.name == binary.name, project_cmds) - ) + return [ + cmd for cmd in project_cmds + if cmd.path.name == binary.name and cmd.can_be_executed() + ] def create_workload_specific_filename( diff --git a/varats-core/varats/mapping/configuration_map.py b/varats-core/varats/mapping/configuration_map.py index f472c7d00..71a71122e 100644 --- a/varats-core/varats/mapping/configuration_map.py +++ b/varats-core/varats/mapping/configuration_map.py @@ -141,6 +141,7 @@ def create_configuration_map_from_yaml_doc( """ new_config_map = ConfigurationMap() + yaml_doc.pop("config_type", None) for config_id in sorted(yaml_doc): parsed_config = concrete_config_type.create_configuration_from_str( diff --git a/varats-core/varats/paper/case_study.py b/varats-core/varats/paper/case_study.py index 627e96d01..3fb087596 100644 --- a/varats-core/varats/paper/case_study.py +++ b/varats-core/varats/paper/case_study.py @@ -169,7 +169,7 @@ def get_config_ids_for_revision(self, revision: CommitHash) -> tp.List[int]: Returns a list of all configuration IDs specified for this revision. 
Args: - revision: i.e., a commit hash registed in this ``CSStage`` + revision: i.e., a commit hash registered in this ``CSStage`` Returns: list of config IDs """ @@ -580,11 +580,19 @@ def load_configuration_map_from_case_study_file( version_header.raise_if_not_type("CaseStudy") version_header.raise_if_version_is_less_than(1) - next(documents) # Skip case study yaml-doc + next(documents) # skip case study document + try: + while True: + document = next(documents) - return create_configuration_map_from_yaml_doc( - next(documents), concrete_config_type - ) + if document["config_type"] == concrete_config_type.__name__: + break + + return create_configuration_map_from_yaml_doc( + document, concrete_config_type + ) + except StopIteration: + return ConfigurationMap() def store_case_study(case_study: CaseStudy, case_study_location: Path) -> None: diff --git a/varats-core/varats/project/project_domain.py b/varats-core/varats/project/project_domain.py index d7108ecaf..071ed9605 100644 --- a/varats-core/varats/project/project_domain.py +++ b/varats-core/varats/project/project_domain.py @@ -18,6 +18,7 @@ class ProjectDomains(Enum): EDITOR = "Editor" FILE_FORMAT = "File format" HW_EMULATOR = "Hardware emulator" + HPC = "High Performance Applications" PARSER = "Parser" PLANNING = "Planning" PROG_LANG = "Programming language" diff --git a/varats-core/varats/project/varats_command.py b/varats-core/varats/project/varats_command.py new file mode 100644 index 000000000..3a78a5630 --- /dev/null +++ b/varats-core/varats/project/varats_command.py @@ -0,0 +1,118 @@ +"""Custom version of benchbuild's Command for use with the VaRA-Tool-Suite.""" +import typing as tp + +from benchbuild.command import Command, ProjectCommand, PathToken + +from varats.utils.config import get_config_patches + +if tp.TYPE_CHECKING: + from plumbum.commands.base import BoundEnvCommand + + from varats.project.varats_project import VProject + + +class VCommand(Command): # type: ignore [misc] + """ + Wrapper around 
benchbuild's Command class. + + Attributes: + requires_any_args: any of these command line args must be available for + successful execution. + requires_all_args: all of these command line args must be available for + successful execution. + requires_any_patch: any of these patch feature-tags must be available for + successful execution. + requires_all_patch: all of these patch feature-tags must be available for + successful execution. + """ + + _requires: tp.Set[str] + + def __init__( + self, + *args: tp.Any, + requires_any_args: tp.Optional[tp.Set[str]] = None, + requires_all_args: tp.Optional[tp.Set[str]] = None, + requires_any_patch: tp.Optional[tp.Set[str]] = None, + requires_all_patch: tp.Optional[tp.Set[str]] = None, + redirect_stdin: tp.Optional[PathToken] = None, + redirect_stdout: tp.Optional[PathToken] = None, + **kwargs: tp.Union[str, tp.List[str]], + ) -> None: + + super().__init__(*args, **kwargs) + self._requires_any_args = requires_any_args or set() + self._requires_all_args = requires_all_args or set() + self._requires_any_patch = requires_any_patch or set() + self._requires_all_patch = requires_all_patch or set() + self._redirect_stdin = redirect_stdin + self._redirect_stdout = redirect_stdout + + @property + def requires_any_args(self) -> tp.Set[str]: + return self._requires_any_args + + @property + def requires_all_args(self) -> tp.Set[str]: + return self._requires_all_args + + @property + def requires_any_patch(self) -> tp.Set[str]: + return self._requires_any_patch + + @property + def requires_all_patch(self) -> tp.Set[str]: + return self._requires_all_patch + + def as_plumbum(self, **kwargs: tp.Any) -> 'BoundEnvCommand': + cmd = super().as_plumbum(**kwargs) + + if self._redirect_stdin: + cmd = cmd < str(self._redirect_stdin.render(**kwargs)) + + if self._redirect_stdout: + cmd = cmd > str(self._redirect_stdout.render(**kwargs)) + + return cmd + + +class VProjectCommand(ProjectCommand): # type: ignore + + def __init__(self, project: 
'VProject', command: Command): + super().__init__(project, command) + self.v_command = command if isinstance(command, VCommand) else None + self.v_project = project + + def can_be_executed(self) -> bool: + """ + Checks whether this command can be executed with the given + configuration. + + Returns: + whether this command can be executed + """ + # non-VCommands do not support filtering by configuration, so we default + # to using them as-is + if self.v_command is None: + return True + + all_args = set(self.v_command.rendered_args(project=self.v_project)) + all_patch_tags: tp.Set[str] = set() + + for patch in get_config_patches(self.v_project): + if patch.feature_tags: + all_patch_tags.update(patch.feature_tags) + + return bool(( + not self.v_command.requires_any_args or + all_args.intersection(self.v_command.requires_any_args) + ) and ( + not self.v_command.requires_all_args or + self.v_command.requires_all_args.issubset(all_args) + ) and ( + not self.v_command.requires_any_patch or + all_patch_tags.intersection(self.v_command.requires_any_patch) + ) and ( + not self.v_command.requires_all_patch or + self.v_command.requires_all_patch.issubset(all_patch_tags) + )) diff --git a/varats-core/varats/provider/patch/__init__.py b/varats-core/varats/provider/patch/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/varats-core/varats/provider/patch/patch_provider.py b/varats-core/varats/provider/patch/patch_provider.py new file mode 100644 index 000000000..27ebf31ba --- /dev/null +++ b/varats-core/varats/provider/patch/patch_provider.py @@ -0,0 +1,357 @@ +""" +Module for the :class:`PatchProvider`. + +The patch provider enables users to query patches for project, which can be +applied during an experiment to alter the state of the project. 
+""" + +import os +import typing as tp +import warnings +from pathlib import Path + +import benchbuild as bb +import yaml +from benchbuild.project import Project +from benchbuild.source.base import target_prefix +from yaml import YAMLError + +from varats.project.project_util import get_local_project_git_path +from varats.provider.provider import Provider, ProviderType +from varats.utils.filesystem_util import lock_file +from varats.utils.git_commands import pull_current_branch, fetch_repository +from varats.utils.git_util import ( + CommitHash, + ShortCommitHash, + get_all_revisions_between, + get_initial_commit, +) + + +class Patch: + """A class for storing a single project-specific Patch.""" + + def __init__( + self, + project_name: str, + shortname: str, + description: str, + path: Path, + valid_revisions: tp.Optional[tp.Set[CommitHash]] = None, + tags: tp.Optional[tp.Set[str]] = None, + feature_tags: tp.Optional[tp.Set[str]] = None + ): + self.project_name: str = project_name + self.shortname: str = shortname + self.description: str = description + self.path: Path = path + self.valid_revisions: tp.Set[ + CommitHash] = valid_revisions if valid_revisions else set() + self.tags: tp.Optional[tp.Set[str]] = tags + self.feature_tags: tp.Optional[tp.Set[str]] = feature_tags + + @staticmethod + def from_yaml(yaml_path: Path) -> 'Patch': + """Creates a Patch from a YAML file.""" + + yaml_dict = yaml.safe_load(yaml_path.read_text()) + + project_name = yaml_dict["project_name"] + shortname = yaml_dict["shortname"] + description = yaml_dict["description"] + path = yaml_dict["path"] + # Convert to full qualified path, as we know that path is relative to + # the yaml info file. 
+ path = yaml_path.parent / path + + tags = yaml_dict.get("tags") + feature_tags = yaml_dict.get("feature_tags") + + project_git_path = get_local_project_git_path(project_name) + + # Update repository to have all upstream changes + fetch_repository(project_git_path) + + def parse_revisions( + rev_dict: tp.Dict[str, tp.Any] + ) -> tp.Set[CommitHash]: + res: tp.Set[CommitHash] = set() + + if "single_revision" in rev_dict: + if isinstance(rev_dict["single_revision"], str): + res.add(ShortCommitHash(rev_dict["single_revision"])) + else: + res.update([ + ShortCommitHash(r) for r in rev_dict["single_revision"] + ]) + + if "revision_range" in rev_dict: + rev_ranges = rev_dict["revision_range"] + if not isinstance(rev_ranges, list): + rev_ranges = [rev_ranges] + for rev_range in rev_ranges: + if "end" in rev_range: + end_rev = rev_range["end"] + else: + end_rev = "" + res.update( + get_all_revisions_between( + rev_range["start"], end_rev, ShortCommitHash, + project_git_path + ) + ) + + return res + + include_revisions: tp.Set[CommitHash] + if "include_revisions" in yaml_dict: + include_revisions = parse_revisions(yaml_dict["include_revisions"]) + else: + include_revisions = set( + get_all_revisions_between( + get_initial_commit(project_git_path).hash, "", + ShortCommitHash, project_git_path + ) + ) + + if "exclude_revisions" in yaml_dict: + include_revisions.difference_update( + parse_revisions(yaml_dict["exclude_revisions"]) + ) + + return Patch( + project_name, shortname, description, path, include_revisions, tags, + feature_tags + ) + + def __repr__(self) -> str: + return str(self) + + def __str__(self) -> str: + valid_revs = [str(r) for r in self.valid_revisions + ] if self.valid_revisions else [] + str_representation = f"""Patch( + ProjectName: {self.project_name} + Shortname: {self.shortname} + Path: {self.path} + ValidRevs: {valid_revs} +) +""" + + return str_representation + + def __hash__(self) -> int: + hash_args = [self.shortname, self.path] + if self.tags: + 
hash_args += tuple(self.tags) + if self.feature_tags: + hash_args += tuple(self.feature_tags) + + return hash(tuple(hash_args)) + + +class PatchSet: + """A PatchSet is a storage container for project specific patches that can + easily be accessed via the tags of a patch.""" + + def __init__(self, patches: tp.Union[tp.Set[Patch], tp.FrozenSet[Patch]]): + self.__patches: tp.FrozenSet[Patch] = frozenset(patches) + + def __iter__(self) -> tp.Iterator[Patch]: + return self.__patches.__iter__() + + def __contains__(self, value: tp.Any) -> bool: + return self.__patches.__contains__(value) + + def __len__(self) -> int: + return len(self.__patches) + + def __getitem__(self, tags: tp.Union[str, tp.Iterable[str]]) -> 'PatchSet': + """ + Overrides the bracket operator of a PatchSet. + + Returns a PatchSet, such that all patches include all the tags given + """ + # TODO: Discuss if we really want this. Currently this is an "all_of" + # access We could consider to remove the bracket operator and only + # provide the all_of/any_of accessors as it would be clearer what the + # exact behavior is + + # Trick to handle correct set construction if just a single tag is given + if isinstance(tags, str): + tags = [tags] + + tag_set = set(tags) + res_set = set() + + for patch in self.__patches: + if patch.tags and tag_set.issubset(patch.tags): + res_set.add(patch) + + return PatchSet(res_set) + + def __and__(self, rhs: "PatchSet") -> "PatchSet": + return PatchSet(self.__patches.intersection(rhs.__patches)) + + def __or__(self, rhs: "PatchSet") -> "PatchSet": + """Implementing the union of two sets.""" + return PatchSet(self.__patches.union(rhs.__patches)) + + def any_of(self, tags: tp.Union[str, tp.Iterable[str]]) -> "PatchSet": + """Returns a patch set with patches containing at least one of the given + tags.""" + # Trick to handle just a single tag being passed + if isinstance(tags, str): + tags = [tags] + + result: tp.Set[Patch] = set() + for patch in self: + if patch.tags and any(tag 
in patch.tags for tag in tags): + result.add(patch) + + return PatchSet(result) + + def all_of(self, tags: tp.Union[str, tp.Iterable[str]]) -> "PatchSet": + """ + Returns a patch set with patches containing all the given tags. + + Equivalent to bracket operator (__getitem__) + """ + return self[tags] + + def any_of_features(self, feature_tags: tp.Iterable[str]) -> "PatchSet": + """Returns a patch set with patches containing at least one of the given + feature tags.""" + tag_set = set(feature_tags) + result: tp.Set[Patch] = set() + for patch in self: + if patch.feature_tags and patch.feature_tags.intersection(tag_set): + result.add(patch) + + return PatchSet(result) + + def all_of_features( + self, feature_tags: tp.Union[str, tp.Iterable[str]] + ) -> "PatchSet": + """Returns a patch set with patches containing all the given feature + tags.""" + tag_set = set(feature_tags) + result: tp.Set[Patch] = set() + for patch in self: + if patch.feature_tags and tag_set.issubset(patch.feature_tags): + result.add(patch) + + return PatchSet(result) + + def __hash__(self) -> int: + return hash(self.__patches) + + def __repr__(self) -> str: + repr_str = ", ".join([f"{k.shortname}" for k in self.__patches]) + + return f"PatchSet({{{repr_str}}})" + + +class PatchProvider(Provider): + """A provider for getting patch files for a certain project.""" + + patches_repository = "https://github.com/se-sic/vara-project-patches.git" + + patches_source = bb.source.Git( + remote=patches_repository, + local="patch-configurations", + refspec="origin/HEAD", + limit=None, + shallow=False + ) + + def __init__(self, project: tp.Type[Project]): + super().__init__(project) + + self._update_local_patches_repo() + repo_path = self._get_patches_repository_path() + + patches_project_dir = repo_path / self.project.NAME + + if not patches_project_dir.is_dir(): + warnings.warn( + "Could not find patches directory for project " + f"'{self.project.NAME}'." 
+            )
+
+        self.__patches: tp.Set[Patch] = set()
+
+        for root, _, files in os.walk(patches_project_dir):
+            for filename in files:
+                if not filename.endswith(".info"):
+                    continue
+
+                info_path = Path(os.path.join(root, filename))
+                try:
+                    current_patch = Patch.from_yaml(info_path)
+                    self.__patches.add(current_patch)
+                except YAMLError:
+                    warnings.warn(
+                        f"Unable to parse patch info in: '{info_path}'"
+                    )
+
+    def get_by_shortname(self, shortname: str) -> tp.Optional[Patch]:
+        """
+        Returns a patch with a specific shortname, if such a patch exists.
+
+        None otherwise
+        """
+        for patch in self.__patches:
+            if patch.shortname == shortname:
+                return patch
+
+        return None
+
+    def get_patches_for_revision(self, revision: CommitHash) -> PatchSet:
+        """Returns all patches that are valid for the given revision."""
+        return PatchSet({
+            p for p in self.__patches if revision in p.valid_revisions
+        })
+
+    @classmethod
+    def create_provider_for_project(
+        cls: tp.Type[ProviderType], project: tp.Type[Project]
+    ) -> 'PatchProvider':
+        """
+        Creates a provider instance for the given project.
+
+        Note:
+            A provider may not contain any patches at all if there are no
+            existing patches for a project
+
+        Returns:
+            a provider instance for the given project
+        """
+        return PatchProvider(project)
+
+    @classmethod
+    def create_default_provider(
+        cls: tp.Type[ProviderType], project: tp.Type[Project]
+    ) -> 'PatchProvider':
+        """
+        Creates a default provider instance that can be used with any project.
+
+        Returns:
+            a default provider instance
+        """
+        raise AssertionError(
+            "All usages should be covered by the project specific provider."
+ ) + + @classmethod + def _get_patches_repository_path(cls) -> Path: + # pathlib doesn't have type annotations for '/' + return tp.cast(Path, Path(target_prefix()) / cls.patches_source.local) + + @classmethod + def _update_local_patches_repo(cls) -> None: + lock_path = Path(target_prefix()) / "patch_provider.lock" + + with lock_file(lock_path): + cls.patches_source.fetch() + pull_current_branch(cls._get_patches_repository_path()) diff --git a/varats-core/varats/report/gnu_time_report.py b/varats-core/varats/report/gnu_time_report.py index 88200ba52..ddaeecac3 100644 --- a/varats-core/varats/report/gnu_time_report.py +++ b/varats-core/varats/report/gnu_time_report.py @@ -37,7 +37,7 @@ class TimeReport(BaseReport, shorthand="TR", file_type="txt"): def __init__(self, path: Path) -> None: super().__init__(path) - + self.__filesystem_io = (-1, -1) with open(self.path, 'r') as stream: for line in stream: line = line.strip() @@ -64,6 +64,16 @@ def __init__(self, path: Path) -> None: TimeReport._parse_wall_clock_time(line) continue + if line.startswith("Major (requiring I/O) page faults"): + self.__major_page_faults: int = \ + TimeReport._parse_major_page_faults(line) + continue + + if line.startswith("Minor (reclaiming a frame) page faults"): + self.__minor_page_faults: int = \ + TimeReport._parse_minor_page_faults(line) + continue + if line.startswith("Voluntary context switches"): self.__voluntary_ctx_switches: int = \ TimeReport._parse_voluntary_ctx_switches(line) @@ -74,7 +84,19 @@ def __init__(self, path: Path) -> None: TimeReport._parse_involuntary_ctx_switches(line) continue - # print("Not matched: ", line) + if line.startswith("File system inputs"): + self.__filesystem_io = ( + TimeReport._parse_filesystem_io(line), + self.__filesystem_io[1] + ) + continue + + if line.startswith("File system outputs"): + self.__filesystem_io = ( + self.__filesystem_io[0], + TimeReport._parse_filesystem_io(line) + ) + continue @property def command_name(self) -> str: @@ -101,6 
+123,25 @@ def max_res_size(self) -> int: """Maximum resident size.""" return self.__max_resident_size + @property + def major_page_faults(self) -> int: + """Major page faults (require I/O).""" + return self.__major_page_faults + + @property + def minor_page_faults(self) -> int: + """Minor page faults (reclaim a frame).""" + return self.__minor_page_faults + + @property + def filesystem_io(self) -> tp.Tuple[int, int]: + """ + Filesystem inputs/outputs. + + Returns: a tuple of (#inputs, #outputs) + """ + return self.__filesystem_io + @property def voluntary_ctx_switches(self) -> int: """Number of voluntary context switches.""" @@ -217,6 +258,20 @@ def _parse_max_resident_size(line: str) -> int: "Could not parse max resident set size: ", line ) + @staticmethod + def _parse_major_page_faults(line: str) -> int: + if line.startswith("Major (requiring I/O) page faults"): + return int(line.split(":")[1]) + + raise WrongTimeReportFormat("Could not parse major page faults: ", line) + + @staticmethod + def _parse_minor_page_faults(line: str) -> int: + if line.startswith("Minor (reclaiming a frame) page faults"): + return int(line.split(":")[1]) + + raise WrongTimeReportFormat("Could not parse minor page faults: ", line) + @staticmethod def _parse_voluntary_ctx_switches(line: str) -> int: if line.startswith("Voluntary context switches"): @@ -235,6 +290,15 @@ def _parse_involuntary_ctx_switches(line: str) -> int: "Could not parse involuntary context switches: ", line ) + @staticmethod + def _parse_filesystem_io(line: str) -> int: + if line.startswith("File system "): + return int(line.split(":")[1]) + + raise WrongTimeReportFormat( + "Could not parse filesystem inputs/outputs: ", line + ) + class TimeReportAggregate( ReportAggregate[TimeReport], @@ -268,6 +332,18 @@ def measurements_ctx_switches(self) -> tp.List[int]: def max_resident_sizes(self) -> tp.List[int]: return [report.max_res_size for report in self.reports()] + @property + def major_page_faults(self) -> 
tp.List[int]: + return [report.major_page_faults for report in self.reports()] + + @property + def minor_page_faults(self) -> tp.List[int]: + return [report.minor_page_faults for report in self.reports()] + + @property + def filesystem_io(self) -> tp.List[tp.Tuple[int, int]]: + return [report.filesystem_io for report in self.reports()] + @property def summary(self) -> str: import numpy as np # pylint: disable=import-outside-toplevel @@ -284,7 +360,7 @@ def summary(self) -> str: class WLTimeReportAggregate( WorkloadSpecificReportAggregate[TimeReport], - shorthand=TimeReport.SHORTHAND + ReportAggregate.SHORTHAND, + shorthand="WL" + TimeReport.SHORTHAND + ReportAggregate.SHORTHAND, file_type=ReportAggregate.FILE_TYPE ): """Context Manager for parsing multiple time reports stored inside a zip diff --git a/varats-core/varats/report/linux_perf_report.py b/varats-core/varats/report/linux_perf_report.py new file mode 100644 index 000000000..ef0eacdea --- /dev/null +++ b/varats-core/varats/report/linux_perf_report.py @@ -0,0 +1,117 @@ +""" +Simple report module to create and handle the standard timing output of perf +stat. + +Examples to produce a ``LinuxPerfReport``: + + Commandline usage: + .. code-block:: bash + + export REPORT_FILE="Path/To/MyFile" + perf stat -x ";" -o $REPORT_FILE -- sleep 2 + + Experiment code: + .. 
code-block:: python + + from benchbuild.utils.cmd import time, sleep + report_file = "Path/To/MyFile" + command = sleep["2"] + perf("stat", "-x", "';'", "-o", f"{report_file}", "--", command) +""" +import math +import typing as tp +from pathlib import Path + +import numpy as np + +from varats.report.report import BaseReport, ReportAggregate + + +class LinuxPerfReport(BaseReport, shorthand="LPR", file_type="txt"): + """Report class to access perf stat output.""" + + def __init__(self, path: Path) -> None: + super().__init__(path) + self.__elapsed_time = math.nan + self.__ctx_switches: int = -1 + self.__branch_misses: int = -1 + + with open(self.path, 'r', newline="") as stream: + for line in stream: + line = line.strip("\n ") + # print(f"{line=}") + + if line == "" or line.startswith("#"): + continue + + if "time elapsed" in line: + self.__elapsed_time = self.__parse_elapsed_time(line) + + if "context-switches:u" in line: + self.__ctx_switches = self.__parse_ctx_switches(line) + + if "branch-misses:u" in line: + self.__branch_misses = self.__parse_branch_misses(line) + + if self.__branch_misses == math.nan: + raise AssertionError() + + @staticmethod + def __parse_elapsed_time(line: str) -> float: + return float(line.split(" ")[0].replace(",", "")) + + @staticmethod + def __parse_ctx_switches(line: str) -> int: + return int(line.split(" ")[0].replace(",", "")) + + @staticmethod + def __parse_branch_misses(line: str) -> tp.Optional[int]: + if line.startswith(""): + return None + return int(line.split(" ")[0].replace(",", "")) + + @property + def elapsed_time(self) -> float: + return self.__elapsed_time + + @property + def ctx_switches(self) -> int: + return self.__ctx_switches + + @property + def branch_misses(self) -> int: + return self.__branch_misses + + def __repr__(self) -> str: + return str(self) + + def __str__(self) -> str: + return f"""LPR ({self.path}) + ├─ ElapsedTime: {self.elapsed_time} + ├─ CtxSwitches: {self.ctx_switches} + └─ BranchMisses: 
{self.branch_misses} +""" + + +class LinuxPerfReportAggregate( + ReportAggregate[LinuxPerfReport], + shorthand=LinuxPerfReport.SHORTHAND + ReportAggregate.SHORTHAND, + file_type=ReportAggregate.FILE_TYPE +): + """Meta report for parsing multiple Linux perf reports stored inside a zip + file.""" + + def __init__(self, path: Path) -> None: + super().__init__(path, LinuxPerfReport) + + @property + def elapsed_time(self) -> tp.List[float]: + return [report.elapsed_time for report in self.reports()] + + @property + def ctx_switches(self) -> tp.List[int]: + return [report.ctx_switches for report in self.reports()] + + @property + def branch_misses(self) -> tp.List[int]: + return [report.branch_misses for report in self.reports()] diff --git a/varats-core/varats/report/multi_patch_report.py b/varats-core/varats/report/multi_patch_report.py new file mode 100644 index 000000000..74f861fce --- /dev/null +++ b/varats-core/varats/report/multi_patch_report.py @@ -0,0 +1,83 @@ +"""MultiPatchReport to group together similar reports that where produced for +differently patched projects.""" +import shutil +import tempfile +import typing as tp +from pathlib import Path + +from varats.provider.patch.patch_provider import Patch +from varats.report.report import ReportTy, BaseReport + + +class MultiPatchReport( + BaseReport, tp.Generic[ReportTy], shorthand="MPR", file_type=".zip" +): + """Meta report to group together reports of the same type that where + produced with differently patched projects.""" + + def __init__(self, path: Path, report_type: tp.Type[ReportTy]) -> None: + super().__init__(path) + self.__patched_reports: tp.Dict[str, ReportTy] = {} + self.__base = None + + with tempfile.TemporaryDirectory() as tmp_result_dir: + shutil.unpack_archive(path, extract_dir=tmp_result_dir) + + for report in Path(tmp_result_dir).iterdir(): + if self.is_baseline_report(report.name): + self.__base = report_type(report) + elif self.is_patched_report(report.name): + self.__patched_reports[ + 
self._parse_patch_shorthand_from_report_name( + report.name + )] = report_type(report) + + if not self.__base or not self.__patched_reports: + raise AssertionError( + f"Reports where missing in the file {path=}" + ) + + def get_baseline_report(self) -> ReportTy: + return self.__base + + def get_report_for_patch(self, + patch_shortname: str) -> tp.Optional[ReportTy]: + """Get the report for a given patch shortname.""" + if patch_shortname in self.__patched_reports: + return self.__patched_reports[patch_shortname] + + return None + + def get_patch_names(self) -> tp.List[str]: + return list(self.__patched_reports.keys()) + + def get_patched_reports(self) -> tp.ValuesView[ReportTy]: + return self.__patched_reports.values() + + @staticmethod + def create_baseline_report_name(base_file_name: str) -> str: + return f"baseline_{base_file_name}" + + @staticmethod + def is_baseline_report(file_name: str) -> bool: + return file_name.startswith("baseline_") + + @staticmethod + def create_patched_report_name(patch: Patch, base_file_name: str) -> str: + return ( + f"patched_{len(patch.shortname)}_" + + f"{patch.shortname}_{base_file_name}" + ) + + @staticmethod + def is_patched_report(file_name: str) -> bool: + return file_name.startswith("patched_") + + @staticmethod + def _parse_patch_shorthand_from_report_name(file_name: str) -> str: + """Parse the patch shorthand from a given patched report.""" + fn_without_prefix = file_name[len("patched_"):] + split_leftover_fn = fn_without_prefix.partition("_") + shortname_length = int(split_leftover_fn[0]) + patch_shortname = "".join(split_leftover_fn[2:])[:shortname_length] + return patch_shortname diff --git a/varats-core/varats/report/report.py b/varats-core/varats/report/report.py index d454b1ee6..ccbffcdbc 100644 --- a/varats-core/varats/report/report.py +++ b/varats-core/varats/report/report.py @@ -311,7 +311,7 @@ def experiment_shorthand(self) -> str: the experiment shorthand from a result file """ if (match := 
ReportFilename.__RESULT_FILE_REGEX.search(self.filename)): - return match.group("experiment_shorthand") + return match.group("experiment_shorthand").split('/')[-1] raise ValueError(f'File {self.filename} name was wrongly formatted.') @@ -412,7 +412,7 @@ def get_file_name( file_ext: file extension of the report file Returns: - name for the report file that can later be uniquly identified + name for the report file that can later be uniquely identified """ status_ext = FileStatusExtension.get_status_extension(extension_type) diff --git a/varats-core/varats/utils/config.py b/varats-core/varats/utils/config.py index 9cdb950c9..c76e6c35c 100644 --- a/varats-core/varats/utils/config.py +++ b/varats-core/varats/utils/config.py @@ -2,13 +2,23 @@ import typing as tp from pathlib import Path -from varats.base.configuration import Configuration +from varats.base.configuration import ( + Configuration, + PlainCommandlineConfiguration, + PatchConfiguration, +) from varats.mapping.configuration_map import ConfigurationMap from varats.paper.case_study import ( CaseStudy, load_configuration_map_from_case_study_file, ) -from varats.paper.paper_config import PaperConfig +from varats.paper.paper_config import PaperConfig, get_paper_config +from varats.project.sources import FeatureSource +from varats.provider.patch.patch_provider import PatchSet, PatchProvider +from varats.utils.git_util import ShortCommitHash + +if tp.TYPE_CHECKING: + from varats.project.varats_project import VProject def load_configuration_map_for_case_study( @@ -34,3 +44,90 @@ def load_configuration_map_for_case_study( f"{case_study.project_name}_{case_study.version}.case_study" ), concrete_config_type ) + + +def get_current_config_id(project: 'VProject') -> tp.Optional[int]: + """ + Get, if available, the current config id of project. Should the project be + not configuration specific ``None`` is returned. 
+ + Args: + project: to extract the config id from + + Returns: + config_id if available for the given project + """ + if project.active_revision.has_variant(FeatureSource.LOCAL_KEY): + return int( + project.active_revision.variant_by_name(FeatureSource.LOCAL_KEY + ).version + ) + + return None + + +def get_config( + project: 'VProject', config_type: tp.Type[Configuration] +) -> tp.Optional[Configuration]: + config_id = get_current_config_id(project) + if config_id is None: + return None + + paper_config = get_paper_config() + case_studies = paper_config.get_case_studies(cs_name=project.name) + + if len(case_studies) > 1: + raise AssertionError( + "Cannot handle multiple case studies of the same project." + ) + + case_study = case_studies[0] + + config_map = load_configuration_map_for_case_study( + paper_config, case_study, config_type + ) + + config = config_map.get_configuration(config_id) + + return config + + +def get_extra_config_options(project: 'VProject') -> tp.List[str]: + """ + Get extra program options that were specified in the particular + configuration of \a Project. + + Args: + project: to get the extra options for + + Returns: + list of command line options as string + """ + config = get_config(project, PlainCommandlineConfiguration) + if not config: + return [] + return list(map(lambda option: option.value, config.options())) + + +def get_config_patches(project: 'VProject') -> PatchSet: + """ + Get required patches for the particular configuration of \a Project. 
+ + Args: + project: to get the patches for + + Returns: + list of patches + """ + config = get_config(project, PatchConfiguration) + if not config: + return PatchSet(set()) + + patch_provider = PatchProvider.create_provider_for_project(project) + revision = ShortCommitHash(project.revision.primary.version) + feature_tags = {opt.value for opt in config.options()} + patches = patch_provider.get_patches_for_revision(revision).all_of_features( + feature_tags + ) + + return patches diff --git a/varats-core/varats/utils/filesystem_util.py b/varats-core/varats/utils/filesystem_util.py index 6f71f01d9..258fb5e27 100644 --- a/varats-core/varats/utils/filesystem_util.py +++ b/varats-core/varats/utils/filesystem_util.py @@ -1,6 +1,8 @@ """Utility functions for handling filesystem related tasks.""" - +import fcntl +import os.path import typing as tp +from contextlib import contextmanager from pathlib import Path @@ -13,3 +15,16 @@ def __init__(self, folder: tp.Union[Path, str]) -> None: f"Folder: '{str(folder)}' should be created " "but was already present." 
) + + +@contextmanager +def lock_file(lock_path: Path, + lock_mode: int = fcntl.LOCK_EX) -> tp.Generator[None, None, None]: + open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC + lock_fd = os.open(lock_path, open_mode) + try: + fcntl.flock(lock_fd, lock_mode) + yield + finally: + fcntl.flock(lock_fd, fcntl.LOCK_UN) + os.close(lock_fd) diff --git a/varats-core/varats/utils/git_util.py b/varats-core/varats/utils/git_util.py index 18cb6f714..6f0cd1c1d 100644 --- a/varats-core/varats/utils/git_util.py +++ b/varats-core/varats/utils/git_util.py @@ -3,8 +3,10 @@ import logging import re import typing as tp +from collections import defaultdict from enum import Enum from itertools import chain +from operator import attrgetter from pathlib import Path from types import TracebackType @@ -1030,8 +1032,9 @@ class RevisionBinaryMap(tp.Container[str]): def __init__(self, repo_location: Path) -> None: self.__repo_location = repo_location - self.__revision_specific_mappings: tp.Dict['AbstractRevisionRange', - ProjectBinaryWrapper] = {} + self.__revision_specific_mappings: tp.Dict[ + 'AbstractRevisionRange', + tp.List[ProjectBinaryWrapper]] = defaultdict(list) self.__always_valid_mappings: tp.List[ProjectBinaryWrapper] = [] def specify_binary( @@ -1060,7 +1063,9 @@ def specify_binary( override_entry_point = kwargs.get("override_entry_point", None) if override_entry_point: override_entry_point = Path(override_entry_point) - validity_range = kwargs.get("only_valid_in", None) + validity_range: AbstractRevisionRange = kwargs.get( + "only_valid_in", None + ) valid_exit_codes = kwargs.get("valid_exit_codes", None) wrapped_binary = ProjectBinaryWrapper( @@ -1069,7 +1074,10 @@ def specify_binary( ) if validity_range: - self.__revision_specific_mappings[validity_range] = wrapped_binary + validity_range.init_cache(self.__repo_location) + self.__revision_specific_mappings[validity_range].append( + wrapped_binary + ) else: self.__always_valid_mappings.append(wrapped_binary) @@ -1080,23 +1088,22 
@@ def __getitem__(self, revision = revision.to_short_commit_hash() revision_specific_binaries = [] - for validity_range, wrapped_binary \ + for validity_range, wrapped_binaries \ in self.__revision_specific_mappings.items(): - if revision in get_all_revisions_between( - validity_range.id_start, validity_range.id_end, ShortCommitHash, - self.__repo_location - ): - revision_specific_binaries.append(wrapped_binary) + if revision in map(ShortCommitHash, validity_range): + revision_specific_binaries.extend(wrapped_binaries) revision_specific_binaries.extend(self.__always_valid_mappings) - return revision_specific_binaries + return sorted( + revision_specific_binaries, key=attrgetter("name", "path") + ) def __contains__(self, binary_name: object) -> bool: if isinstance(binary_name, str): for binary in chain( self.__always_valid_mappings, - self.__revision_specific_mappings.values() + *self.__revision_specific_mappings.values() ): if binary.name == binary_name: return True diff --git a/varats-core/varats/utils/settings.py b/varats-core/varats/utils/settings.py index 54be8fcb4..e8b207cf9 100644 --- a/varats-core/varats/utils/settings.py +++ b/varats-core/varats/utils/settings.py @@ -238,6 +238,7 @@ def vara_cfg() -> s.Configuration: _CFG, ['.varats.yaml', '.varats.yml'], "VARATS_CONFIG_FILE" ) s.update_env(_CFG) + create_missing_folders() return _CFG @@ -249,7 +250,7 @@ def add_vara_experiment_options( "outfile": { "default": "", "desc": "Path to store results of VaRA CFR analysis.", - "value": s.ConfigPath(str(varats_config["result_dir"])) + "value": str(varats_config["result_dir"]) }, "result": { "default": @@ -257,11 +258,7 @@ def add_vara_experiment_options( "desc": "Path to store already annotated projects.", "value": - s.ConfigPath( - os.path.join( - str(vara_cfg()["benchbuild_root"]), "BC_files" - ) - ) + os.path.join(str(vara_cfg()["benchbuild_root"]), "BC_files") } } @@ -286,8 +283,12 @@ def bb_cfg() -> s.Configuration: bb_cfg_path = Path(bb_root) / 
".benchbuild.yml" if bb_cfg_path.exists(): BB_CFG.load(local.path(bb_cfg_path)) - BB_CFG.init_from_env() + + # Environment should always override config files + BB_CFG.init_from_env() + _BB_CFG = BB_CFG + create_missing_bb_folders() return _BB_CFG @@ -318,7 +319,7 @@ def create_missing_folder_for_cfg( if config_node.has_value() and\ config_node.value is not None and\ not path.isdir(config_node.value): - makedirs(config_node.value) + makedirs(config_node.value, exist_ok=True) create_missing_folder_for_cfg("benchbuild_root") create_missing_folder_for_cfg("result_dir") @@ -329,6 +330,32 @@ def create_missing_folder_for_cfg( create_missing_folder_for_cfg("artefacts_dir", vara_cfg()["artefacts"]) +def create_missing_bb_folders() -> None: + """Create folders that do not exist but were set in the config.""" + + def create_missing_folder_for_cfg( + cfg_varname: str, local_cfg: s.Configuration = bb_cfg() + ) -> None: + """Create missing folders for a specific config path.""" + + config_node = local_cfg[cfg_varname] + if config_node.has_value() and\ + config_node.value is not None and\ + not path.isdir(str(config_node.value)): + makedirs(str(config_node.value), exist_ok=True) + + create_missing_folder_for_cfg("outfile", bb_cfg()["varats"]) + create_missing_folder_for_cfg("result", bb_cfg()["varats"]) + create_missing_folder_for_cfg("build_dir") + create_missing_folder_for_cfg("tmp_dir") + create_missing_folder_for_cfg("node_dir", bb_cfg()["slurm"]) + create_missing_folder_for_cfg("logs", bb_cfg()["slurm"]) + create_missing_folder_for_cfg("root", bb_cfg()["container"]) + create_missing_folder_for_cfg("runroot", bb_cfg()["container"]) + create_missing_folder_for_cfg("export", bb_cfg()["container"]) + create_missing_folder_for_cfg("import", bb_cfg()["container"]) + + def save_config() -> None: """Persist VaRA config to a yaml file.""" if vara_cfg()["config_file"].value is None: @@ -350,6 +377,7 @@ def save_bb_config(benchbuild_cfg: tp.Optional[s.Configuration] = None) -> None: 
str(vara_cfg()["benchbuild_root"]) ) / ".benchbuild.yml" benchbuild_cfg["config_file"] = str(config_file) + create_missing_bb_folders() benchbuild_cfg.store(config_file) diff --git a/varats/setup.py b/varats/setup.py index 7b19138e6..f8457097d 100644 --- a/varats/setup.py +++ b/varats/setup.py @@ -10,14 +10,14 @@ setup( name='varats', - version='13.0.4', + version='13.0.5', url='https://github.com/se-sic/vara-tool-suite', packages=find_namespace_packages(include=['varats.*']), namespace_packages=["varats"], setup_requires=["pytest-runner", "setuptools_scm"], tests_require=["pytest", "pytest-cov"], install_requires=[ - "benchbuild>=6.7", + "benchbuild>=6.8", "click>=8.1.3", "distro>=1.5.0", "graphviz>=0.14.2", @@ -42,7 +42,7 @@ "scikit-learn>=1.2.2", "seaborn>=0.12.2", "tabulate>=0.9", - "varats-core>=13.0.4", + "varats-core>=13.0.5", "wllvm>=1.3.1", ], author="Florian Sattler", diff --git a/varats/varats/containers/containers.py b/varats/varats/containers/containers.py index 8d9aa984d..6e3e319c0 100644 --- a/varats/varats/containers/containers.py +++ b/varats/varats/containers/containers.py @@ -37,16 +37,23 @@ class ImageBase(Enum): """Container image bases that can be used by projects.""" - DEBIAN_10 = Distro.DEBIAN + DEBIAN_10 = (Distro.DEBIAN, 10) + DEBIAN_12 = (Distro.DEBIAN, 12) - def __init__(self, distro: Distro): + def __init__(self, distro: Distro, version_number: int): self.__distro = distro + self.__version_number = version_number @property def distro(self) -> Distro: """Distro of the base image.""" return self.__distro + @property + def version(self) -> int: + """Version number of the distro.""" + return self.__version_number + class ImageStage(Enum): """The stages that make up a base image.""" @@ -149,12 +156,14 @@ def tmpdir(self) -> Path: def _create_stage_00_base_layers(stage_builder: StageBuilder) -> None: _BASE_IMAGES[stage_builder.base](stage_builder) + _setup_venv(stage_builder) + if (research_tool := _get_installable_research_tool()): 
research_tool.container_install_dependencies(stage_builder) def _create_stage_10_varats_layers(stage_builder: StageBuilder) -> None: - stage_builder.layers.run('pip3', 'install', '--upgrade', 'pip') + stage_builder.layers.run('pip', 'install', '--upgrade', 'pip') _add_varats_layers(stage_builder) if bb_cfg()['container']['from_source']: add_benchbuild_layers(stage_builder.layers) @@ -197,9 +206,8 @@ def wrapped(stage_builder: StageBuilder) -> None: .from_("docker.io/library/debian:10") .run('apt', 'update') .run('apt', 'install', '-y', 'wget', 'gnupg', 'lsb-release', - 'software-properties-common', 'python3', 'python3-dev', - 'python3-pip', 'musl-dev', 'git', 'gcc', 'libgit2-dev', - 'libffi-dev', 'libyaml-dev', 'graphviz-dev') + 'software-properties-common', 'musl-dev', 'git', 'gcc', + 'libgit2-dev', 'libffi-dev', 'libyaml-dev', 'graphviz-dev') # install python 3.10 .run('apt', 'install', '-y', 'build-essential', 'gdb', 'lcov', 'pkg-config', 'libbz2-dev', 'libffi-dev', 'libgdbm-dev', @@ -214,13 +222,22 @@ def wrapped(stage_builder: StageBuilder) -> None: .run('make', '-j', str(get_number_of_jobs(bb_cfg()))) .run('make', 'install') .workingdir('/') - # install llvm 13 + # install llvm 14 .run('wget', 'https://apt.llvm.org/llvm.sh') .run('chmod', '+x', './llvm.sh') .run('./llvm.sh', '14', 'all') .run('ln', '-s', '/usr/bin/clang-14', '/usr/bin/clang') .run('ln', '-s', '/usr/bin/clang++-14', '/usr/bin/clang++') - .run('ln', '-s', '/usr/bin/lld-14', '/usr/bin/lld')) + .run('ln', '-s', '/usr/bin/lld-14', '/usr/bin/lld')), + ImageBase.DEBIAN_12: + _create_layers_helper(lambda ctx: ctx.layers + .from_("docker.io/library/debian:12") + .run('apt', 'update') + .run('apt', 'install', '-y', 'wget', 'gnupg', 'lsb-release', + 'software-properties-common', 'musl-dev', 'git', 'gcc', + 'libgit2-dev', 'libffi-dev', 'libyaml-dev', 'graphviz-dev', + 'python3', 'python3-pip', 'python3-virtualenv', 'clang', + 'lld', 'time')) } _STAGE_LAYERS: tp.Dict[ImageStage, @@ -310,6 +327,16 @@ def 
_set_varats_source_mount(image_context: StageBuilder, mnt_src: str) -> None: save_bb_config() +def _setup_venv(image_context: StageBuilder) -> None: + venv_path = "/venv" + if image_context.base == ImageBase.DEBIAN_10: + image_context.layers.run("pip3", "install", "virtualenv") + + image_context.layers.run("virtualenv", venv_path) + image_context.layers.env(VIRTUAL_ENV=venv_path) + image_context.layers.env(PATH=f"{venv_path}/bin:$PATH") + + def _add_varats_layers(image_context: StageBuilder) -> None: crun = bb_cfg()['container']['runtime'].value @@ -322,9 +349,9 @@ def from_source( tgt_dir = image_context.varats_source_mount_target image.run('mkdir', f'{tgt_dir}', runtime=crun) - image.run('pip3', 'install', 'setuptools', runtime=crun) + image.run('pip', 'install', 'setuptools', runtime=crun) - pip_args = ['pip3', 'install'] + pip_args = ['pip', 'install'] if editable_install: pip_args.append("-e") _set_varats_source_mount(image_context, str(src_dir)) @@ -339,7 +366,7 @@ def from_source( def from_pip(image: ContainerImage) -> None: LOG.debug("installing varats from pip release.") image.run( - 'pip3', + 'pip', 'install', '--ignore-installed', 'varats-core', diff --git a/varats/varats/data/metrics.py b/varats/varats/data/metrics.py index 78102735e..4524dc205 100644 --- a/varats/varats/data/metrics.py +++ b/varats/varats/data/metrics.py @@ -28,7 +28,7 @@ def gini_coefficient(distribution: pd.Series) -> float: Calculates the Gini coefficient of the data. For more information see online - `gini coefficient `_. + `Gini coefficient `_. Args: distribution: sorted series to calculate the Gini coefficient for @@ -141,10 +141,12 @@ class ConfusionMatrix(tp.Generic[T]): """ Helper class to automatically calculate classification results. 
- | Predicted Positive (PP) | Predicted Negative (PN) - --------------------|---------------------------|-------------------------- - Actual Positive (P) | True Positive (TP) | False Negative (FN) - Actual Negative (N) | False Positive (FP) | True Negative (TN) + +---------------------+-------------------------+-------------------------+ + | | Predicted Positive (PP) | Predicted Negative (PN) | + +---------------------+-------------------------+-------------------------+ + | Actual Positive (P) | True Positive (TP) | False Negative (FN) | + | Actual Negative (N) | False Positive (FP) | True Negative (TN) | + +---------------------+-------------------------+-------------------------+ Reference: https://en.wikipedia.org/wiki/Precision_and_recall """ diff --git a/varats/varats/experiments/base/just_compile.py b/varats/varats/experiments/base/just_compile.py index 0531bd878..cb91e936a 100644 --- a/varats/varats/experiments/base/just_compile.py +++ b/varats/varats/experiments/base/just_compile.py @@ -15,11 +15,11 @@ get_default_compile_error_wrapped, create_default_analysis_failure_handler, create_new_success_result_filepath, - get_current_config_id, ) from varats.experiment.wllvm import RunWLLVM from varats.project.varats_project import VProject from varats.report.report import ReportSpecification +from varats.utils.config import get_current_config_id # Please take care when changing this file, see docs experiments/just_compile diff --git a/varats/varats/experiments/vara/feature_experiment.py b/varats/varats/experiments/vara/feature_experiment.py index 52d2ffa16..a4cc8722d 100644 --- a/varats/varats/experiments/vara/feature_experiment.py +++ b/varats/varats/experiments/vara/feature_experiment.py @@ -17,6 +17,7 @@ Compile, Clean, ) +from benchbuild.utils.requirements import Requirement, SlurmMem from plumbum import local from varats.experiment.experiment_util import ( @@ -24,9 +25,7 @@ VersionExperiment, ZippedReportFolder, create_new_success_result_filepath, - 
get_current_config_id, get_default_compile_error_wrapped, - get_extra_config_options, WithUnlimitedStackSize, ) from varats.experiment.trace_util import merge_trace @@ -39,6 +38,7 @@ FeatureModelProvider, ) from varats.report.report import ReportSpecification +from varats.utils.config import get_current_config_id, get_extra_config_options class FeatureInstrType(Enum): @@ -73,6 +73,8 @@ class FeatureExperiment(VersionExperiment, shorthand=""): REPORT_SPEC = ReportSpecification() + REQUIREMENTS: tp.List[Requirement] = [SlurmMem("250G")] + @abstractmethod def actions_for_project(self, project: VProject) -> tp.MutableSequence[Step]: @@ -181,18 +183,23 @@ def get_vara_tracing_cflags( Returns: list of tracing specific cflags """ c_flags = [] + if instr_type != FeatureInstrType.NONE: c_flags += ["-fsanitize=vara", f"-fvara-instr={instr_type.value}"] + c_flags += [ "-flto", "-fuse-ld=lld", "-flegacy-pass-manager", "-fno-omit-frame-pointer" ] - if instruction_threshold is not None: + + if instruction_threshold is None: # For test projects, do not exclude small regions if project is not None and project.domain == ProjectDomains.TEST: instruction_threshold = 1 + if instruction_threshold is not None: c_flags += [f"-fvara-instruction-threshold={instruction_threshold}"] + if save_temps: c_flags += ["-Wl,-plugin-opt=save-temps"] @@ -231,7 +238,7 @@ def __call__(self) -> StepResult: def __str__(self, indent: int = 0) -> str: return textwrap.indent( - f"* {self.project.name}: Run instrumentation verifier", indent * " " + f"* {self.project.name}: Run instrumented code", indent * " " ) def run_traced_code(self) -> StepResult: diff --git a/varats/varats/paper_mgmt/case_study.py b/varats/varats/paper_mgmt/case_study.py index 823c7f154..556441cde 100644 --- a/varats/varats/paper_mgmt/case_study.py +++ b/varats/varats/paper_mgmt/case_study.py @@ -301,7 +301,7 @@ def get_newest_result_files_for_case_study( Returns: list of result file paths """ - files_to_store: 
tp.Dict[tp.Tuple[ShortCommitHash, tp.Optional[int]], + files_to_store: tp.Dict[tp.Tuple[ShortCommitHash, str, tp.Optional[int]], Path] = {} result_dir /= case_study.project_name @@ -319,16 +319,23 @@ def get_newest_result_files_for_case_study( ) if case_study.has_revision(commit_hash) and config_id_matches: - current_file = files_to_store.get((commit_hash, config_id), - None) + current_file = files_to_store.get( + (commit_hash, report_file.experiment_shorthand, config_id), + None + ) if current_file is None: - files_to_store[(commit_hash, config_id)] = opt_res_file + files_to_store[( + commit_hash, report_file.experiment_shorthand, config_id + )] = opt_res_file else: if ( current_file.stat().st_mtime < opt_res_file.stat().st_mtime ): - files_to_store[(commit_hash, config_id)] = opt_res_file + files_to_store[( + commit_hash, report_file.experiment_shorthand, + config_id + )] = opt_res_file return list(files_to_store.values()) diff --git a/varats/varats/plots/scatter_plot_utils.py b/varats/varats/plots/scatter_plot_utils.py index 3510d3da4..005dba2ed 100644 --- a/varats/varats/plots/scatter_plot_utils.py +++ b/varats/varats/plots/scatter_plot_utils.py @@ -13,6 +13,7 @@ def multivariate_grid( y: str, hue: str, global_kde: bool = True, + legend: bool = True, **kwargs: tp.Any ) -> sns.JointGrid: """ @@ -84,7 +85,7 @@ def multivariate_grid( color='grey', warn_singular=False ) - if len(grouped_data) > 1: + if len(grouped_data) > 1 and legend: plt.legend(legends) return grid diff --git a/varats/varats/projects/c_projects/brotli.py b/varats/varats/projects/c_projects/brotli.py index 52541a516..2c9603ad6 100644 --- a/varats/varats/projects/c_projects/brotli.py +++ b/varats/varats/projects/c_projects/brotli.py @@ -2,7 +2,7 @@ import typing as tp import benchbuild as bb -from benchbuild.utils.cmd import mkdir, make +from benchbuild.utils.cmd import cmake, mkdir, make from benchbuild.utils.revision_ranges import ( RevisionRange, block_revisions, @@ -132,7 +132,7 @@ def 
compile(self) -> None: mkdir(brotli_version_source / "out") with local.cwd(brotli_version_source / "out"): with local.env(CC=str(c_compiler)): - bb.watch(local["../configure-cmake"])() + bb.watch(cmake)("-G", "Unix Makefiles", "..") bb.watch(make)("-j", get_number_of_jobs(bb_cfg())) with local.cwd(brotli_version_source): diff --git a/varats/varats/projects/c_projects/bzip2.py b/varats/varats/projects/c_projects/bzip2.py index fa9de73d7..150e364a8 100644 --- a/varats/varats/projects/c_projects/bzip2.py +++ b/varats/varats/projects/c_projects/bzip2.py @@ -3,7 +3,7 @@ from pathlib import Path import benchbuild as bb -from benchbuild.command import Command, SourceRoot, WorkloadSet +from benchbuild.command import SourceRoot, WorkloadSet from benchbuild.source import HTTPMultiple from benchbuild.utils.cmd import cmake, make from benchbuild.utils.revision_ranges import RevisionRange, GoodBadSubgraph @@ -20,6 +20,8 @@ BinaryType, verify_binaries, ) +from varats.project.sources import FeatureSource +from varats.project.varats_command import VCommand from varats.project.varats_project import VProject from varats.utils.git_util import ( ShortCommitHash, @@ -56,6 +58,20 @@ class Bzip2(VProject): "countries-land-1m.geo.json", "countries-land-10m.geo.json", "countries-land-100m.geo.json" ] + ), + FeatureSource(), + HTTPMultiple( + local="geo-maps-compr", + remote={ + "1.0": + "https://github.com/se-sic/compression-data/" + "raw/master/bzip2/geo-maps/" + }, + files=[ + "countries-land-100m.geo.json.bz2", + "countries-land-10m.geo.json.bz2", + "countries-land-1m.geo.json.bz2" + ] ) ] _AUTOTOOLS_VERSIONS = GoodBadSubgraph([ @@ -80,11 +96,8 @@ class Bzip2(VProject): WORKLOADS = { WorkloadSet(WorkloadCategory.MEDIUM): [ - Command( + VCommand( SourceRoot("bzip2") / RSBinary("bzip2"), - "--compress", - "--best", - "-vvv", "--keep", # bzip2 compresses very fast even on the best setting, so we # need the three input files to get approximately 30 seconds @@ -92,11 +105,30 @@ class 
Bzip2(VProject): "geo-maps/countries-land-1m.geo.json", "geo-maps/countries-land-10m.geo.json", "geo-maps/countries-land-100m.geo.json", + label="med_geo", creates=[ "geo-maps/countries-land-1m.geo.json.bz2", "geo-maps/countries-land-10m.geo.json.bz2", "geo-maps/countries-land-100m.geo.json.bz2" - ] + ], + requires_all_args={"--compress"} + ), + VCommand( + SourceRoot("bzip2") / RSBinary("bzip2"), + "--keep", + # bzip2 compresses very fast even on the best setting, so we + # need the three input files to get approximately 30 seconds + # total execution time + "geo-maps-compr/countries-land-1m.geo.json.bz2", + "geo-maps-compr/countries-land-10m.geo.json.bz2", + "geo-maps-compr/countries-land-100m.geo.json.bz2", + label="med_geo", + creates=[ + "geo-maps-compr/countries-land-1m.geo.json", + "geo-maps-compr/countries-land-10m.geo.json", + "geo-maps-compr/countries-land-100m.geo.json" + ], + requires_all_args={"--decompress"} ) ], } @@ -161,3 +193,22 @@ def compile(self) -> None: ) with local.cwd(bzip2_source): verify_binaries(self) + + def recompile(self) -> None: + """Recompile the project.""" + bzip2_source = Path(self.source_of_primary) + bzip2_version = ShortCommitHash(self.version_of_primary) + + if bzip2_version in typed_revision_range( + Bzip2._MAKE_VERSIONS, bzip2_source, ShortCommitHash + ) or bzip2_version in typed_revision_range( + Bzip2._AUTOTOOLS_VERSIONS, bzip2_source, ShortCommitHash + ): + with local.cwd(bzip2_source / "build"): + bb.watch(make)("-j", get_number_of_jobs(bb_cfg())) + else: + with local.cwd(bzip2_source / "build"): + bb.watch(cmake)( + "--build", ".", "--config", "Release", "-j", + get_number_of_jobs(bb_cfg()) + ) diff --git a/varats/varats/projects/c_projects/tig.py b/varats/varats/projects/c_projects/tig.py new file mode 100644 index 000000000..659930819 --- /dev/null +++ b/varats/varats/projects/c_projects/tig.py @@ -0,0 +1,68 @@ +"""Project file for tig.""" +import typing as tp + +import benchbuild as bb +from benchbuild.utils.cmd 
import make +from benchbuild.utils.settings import get_number_of_jobs +from plumbum import local + +from varats.containers.containers import get_base_image, ImageBase +from varats.paper.paper_config import PaperConfigSpecificGit +from varats.project.project_domain import ProjectDomains +from varats.project.project_util import ( + ProjectBinaryWrapper, + get_local_project_git_path, + BinaryType, + verify_binaries, +) +from varats.project.varats_project import VProject +from varats.utils.git_util import ShortCommitHash, RevisionBinaryMap +from varats.utils.settings import bb_cfg + + +class Tig(VProject): + """Tig: text-mode interface for Git""" + + NAME = 'tig' + GROUP = 'c_projects' + DOMAIN = ProjectDomains.VERSION_CONTROL + + SOURCE = [ + PaperConfigSpecificGit( + project_name="tig", + remote="https://github.com/jonas/tig.git", + local="tig", + refspec="origin/HEAD", + limit=None, + shallow=False + ) + ] + + CONTAINER = get_base_image( + ImageBase.DEBIAN_10 + ).run('apt', 'install', '-y', 'git', 'libncurses-dev') + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash + ) -> tp.List[ProjectBinaryWrapper]: + binary_map = RevisionBinaryMap(get_local_project_git_path(Tig.NAME)) + + binary_map.specify_binary("src/tig", BinaryType.EXECUTABLE) + + return binary_map[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + tig_version_source = local.path(self.source_of_primary) + + c_compiler = bb.compiler.cc(self) + with local.cwd(tig_version_source): + + with local.env(CC=str(c_compiler)): + bb.watch(make)("-j", get_number_of_jobs(bb_cfg())) + + verify_binaries(self) diff --git a/varats/varats/projects/c_projects/xz.py b/varats/varats/projects/c_projects/xz.py index 1fac7c349..3d1a580ed 100644 --- a/varats/varats/projects/c_projects/xz.py +++ b/varats/varats/projects/c_projects/xz.py @@ -2,7 +2,7 @@ import typing as tp import benchbuild as bb -from benchbuild.command import Command, SourceRoot, 
WorkloadSet +from benchbuild.command import SourceRoot, WorkloadSet from benchbuild.source import HTTPMultiple from benchbuild.utils.cmd import autoreconf, make from benchbuild.utils.revision_ranges import ( @@ -24,6 +24,7 @@ verify_binaries, ) from varats.project.sources import FeatureSource +from varats.project.varats_command import VCommand from varats.project.varats_project import VProject from varats.utils.git_util import ( ShortCommitHash, @@ -84,16 +85,19 @@ class Xz(VProject): WORKLOADS = { WorkloadSet(WorkloadCategory.EXAMPLE): [ - Command( + VCommand( SourceRoot("xz") / RSBinary("xz"), "-k", - "geo-maps/countries-land-1km.geo.json", + # Use output_param to ensure input file + # gets appended after all arguments. + output_param=["{output}"], + output=SourceRoot("geo-maps/countries-land-250m.geo.json"), label="countries-land-1km", creates=["geo-maps/countries-land-1km.geo.json.xz"] ) ], WorkloadSet(WorkloadCategory.MEDIUM): [ - Command( + VCommand( SourceRoot("xz") / RSBinary("xz"), "-k", "-9e", @@ -101,9 +105,13 @@ class Xz(VProject): "--threads=1", "--format=xz", "-vv", - "geo-maps/countries-land-250m.geo.json", + # Use output_param to ensure input file + # gets appended after all arguments. 
+ output_param=["{output}"], + output=SourceRoot("geo-maps/countries-land-250m.geo.json"), label="countries-land-250m", - creates=["geo-maps/countries-land-250m.geo.json.xz"] + creates=["geo-maps/countries-land-250m.geo.json.xz"], + requires_all_args={"--compress"}, ) ], } diff --git a/varats/varats/projects/cpp_projects/dune.py b/varats/varats/projects/cpp_projects/dune.py new file mode 100644 index 000000000..4d3f58a91 --- /dev/null +++ b/varats/varats/projects/cpp_projects/dune.py @@ -0,0 +1,220 @@ +"""Project file for Dune.""" +import typing as tp + +import benchbuild as bb +from benchbuild.command import WorkloadSet, SourceRoot +from benchbuild.utils import cmd +from benchbuild.utils.revision_ranges import RevisionRange +from plumbum import local + +from varats.containers.containers import get_base_image, ImageBase +from varats.experiment.workload_util import RSBinary, WorkloadCategory +from varats.paper.paper_config import PaperConfigSpecificGit +from varats.project.project_domain import ProjectDomains +from varats.project.project_util import ( + get_local_project_git_path, + BinaryType, + ProjectBinaryWrapper, +) +from varats.project.sources import FeatureSource +from varats.project.varats_command import VCommand +from varats.project.varats_project import VProject +from varats.utils.git_util import ShortCommitHash, RevisionBinaryMap + + +class DunePerfRegression(VProject): + """ + Simulation framework for various applications in mathematics and physics. + + Note: + Currently Dune CANNOT be compiled with the Phasar passes activated + in vara. + Trying to do so will crash the compiler + + If you use Dune with an experiment that uses the vara compiler, + add `-mllvm --vara-disable-phasar` to the projects `cflags` to + disable phasar passes. + This will still allow to analyse compile-time variability. 
+ + Might need deps: + * klu + * spqr + * umfpack + * eigen3 + """ + + NAME = 'DunePerfRegression' + GROUP = 'cpp_projects' + DOMAIN = ProjectDomains.CPP_LIBRARY + + SOURCE = [ + PaperConfigSpecificGit( + project_name='DunePerfRegression', + remote='https://github.com/se-sic/dune-VaRA.git', + local='dune-VaRA', + refspec='origin/HEAD', + limit=None, + shallow=False + ), + FeatureSource() + ] + + CONTAINER = get_base_image(ImageBase.DEBIAN_10) + + WORKLOADS = { + WorkloadSet(WorkloadCategory.EXAMPLE): [ + VCommand( + SourceRoot( + "dune-VaRA/dune-performance-regressions/build-cmake/src" + ) / RSBinary('dune_performance_regressions'), + label='dune_helloworld' + ), + VCommand( + SourceRoot( + "dune-VaRA/dune-performance-regressions/build-cmake/src" + ) / RSBinary('poisson_test'), + label='poisson_non_separated', + creates=[ + 'poisson_UG_Pk_2d.vtu', 'poisson-yasp-Q1-2d.vtu', + 'poisson-yasp-Q1-3d.vtu', 'poisson-yasp-Q2-2d.vtu', + 'poisson-yasp-Q2-3d.vtu' + ] + ), + VCommand( + SourceRoot( + "dune-VaRA/dune-performance-regressions/build-cmake/src" + ) / RSBinary('poisson_ug_pk_2d'), + label='poisson_ug_pk_2d', + creates=['poisson-UG-Pk-2d.vtu'] + ), + VCommand( + SourceRoot( + "dune-VaRA/dune-performance-regressions/build-cmake/src" + ) / RSBinary('poisson_yasp_q1_2d'), + label='poisson_yasp_q1_2d', + creates=['poisson-yasp-q1-2d.vtu'] + ), + VCommand( + SourceRoot( + "dune-VaRA/dune-performance-regressions/build-cmake/src" + ) / RSBinary('poisson_yasp_q1_3d'), + label='poisson_yasp_q1_3d', + creates=['poisson-yasp-q1-3d.vtu'] + ), + VCommand( + SourceRoot( + "dune-VaRA/dune-performance-regressions/build-cmake/src" + ) / RSBinary('poisson_yasp_q2_2d'), + label='poisson_yasp_q2_2d', + creates=['poisson-yasp-q2-2d.vtu'] + ), + VCommand( + SourceRoot( + "dune-VaRA/dune-performance-regressions/build-cmake/src" + ) / RSBinary('poisson_yasp_q2_3d'), + label='poisson_yasp_q2_3d', + creates=['poisson-yasp-q2-3d.vtu'] + ), + VCommand( + SourceRoot( + 
"dune-VaRA/dune-performance-regressions/build-cmake/src" + ) / RSBinary('poisson_alugrid'), + label='poisson_alugrid', + creates=['poisson_ALU_Pk_2d.vtu'] + ) + ] + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash + ) -> tp.List['ProjectBinaryWrapper']: + binary_map = RevisionBinaryMap( + get_local_project_git_path(DunePerfRegression.NAME) + ) + + rev_range = RevisionRange( + '332a9af0b7e3336dd72c4f4b74e09df525b6db0d', 'main' + ) + + binary_map.specify_binary( + 'dune_performance_regressions', + BinaryType.EXECUTABLE, + only_valid_in=rev_range + ) + + binary_map.specify_binary( + 'poisson_test', BinaryType.EXECUTABLE, only_valid_in=rev_range + ) + + binary_map.specify_binary( + 'poisson_alberta', BinaryType.EXECUTABLE, only_valid_in=rev_range + ) + + binary_map.specify_binary( + 'poisson_ug_pk_2d', BinaryType.EXECUTABLE, only_valid_in=rev_range + ) + + binary_map.specify_binary( + 'poisson_yasp_q1_2d', + BinaryType.EXECUTABLE, + only_valid_in=rev_range + ) + + binary_map.specify_binary( + 'poisson_yasp_q2_3d', + BinaryType.EXECUTABLE, + only_valid_in=rev_range + ) + + binary_map.specify_binary( + 'poisson_yasp_q2_2d', + BinaryType.EXECUTABLE, + only_valid_in=rev_range + ) + + binary_map.specify_binary( + 'poisson_yasp_q1_3d', + BinaryType.EXECUTABLE, + only_valid_in=rev_range + ) + + binary_map.specify_binary( + 'poisson_alugrid', BinaryType.EXECUTABLE, only_valid_in=rev_range + ) + + return binary_map[revision] + + def compile(self) -> None: + """Compile the project using the in-built tooling from dune.""" + version_source = local.path(self.source_of(self.primary_source)) + + c_compiler = bb.compiler.cc(self) + cxx_compiler = bb.compiler.cxx(self) + + with local.cwd(version_source): + with local.env( + CC=c_compiler, + CXX=cxx_compiler, + CMAKE_FLAGS=" ".join([ + "-DDUNE_ENABLE_PYTHONBINDINGS=OFF", + "-DCMAKE_DISABLE_FIND_PACKAGE_MPI=TRUE" + ]) + ): + dunecontrol = cmd['./dune-common/bin/dunecontrol'] + + bb.watch(dunecontrol + 
)('--module=dune-performance-regressions', 'all') + + def recompile(self) -> None: + """Recompiles Dune after e.g. a Patch has been applied.""" + version_source = local.path(self.source_of(self.primary_source)) + + with local.cwd(version_source): + dunecontrol = cmd['./dune-common/bin/dunecontrol'] + + bb.watch(dunecontrol + )('--module=dune-performance-regressions', 'make') + + def run_tests(self) -> None: + pass diff --git a/varats/varats/projects/cpp_projects/hyteg.py b/varats/varats/projects/cpp_projects/hyteg.py new file mode 100644 index 000000000..5de8f13b4 --- /dev/null +++ b/varats/varats/projects/cpp_projects/hyteg.py @@ -0,0 +1,110 @@ +"""Adds the HyTeg framework as a project to VaRA-TS.""" +import typing as tp + +import benchbuild as bb +from benchbuild.command import WorkloadSet, SourceRoot +from benchbuild.utils.cmd import make, cmake, mkdir +from benchbuild.utils.revision_ranges import SingleRevision +from benchbuild.utils.settings import get_number_of_jobs +from plumbum import local + +from varats.experiment.workload_util import WorkloadCategory, RSBinary +from varats.paper.paper_config import PaperConfigSpecificGit +from varats.project.project_domain import ProjectDomains +from varats.project.project_util import ( + get_local_project_git_path, + BinaryType, + ProjectBinaryWrapper, +) +from varats.project.sources import FeatureSource +from varats.project.varats_command import VCommand +from varats.project.varats_project import VProject +from varats.utils.git_util import ShortCommitHash, RevisionBinaryMap +from varats.utils.settings import bb_cfg + + +class HyTeg(VProject): + """ + C++ framework for large scale high performance finite element simulations + based on (but not limited to) matrix-free geometric multigrid. + + Note: + Currently HyTeg CANNOT be compiled with the Phasar passes activated + in vara. 
+ Trying to do so will crash the compiler + + If you use HyTeg with an experiment that uses the vara compiler, + add `-mllvm --vara-disable-phasar` to the project's `cflags` to + disable phasar passes. + This will still allow analysing compile-time variability. + """ + NAME = 'HyTeg' + GROUP = 'cpp_projects' + DOMAIN = ProjectDomains.HPC + + SOURCE = [ + PaperConfigSpecificGit( + project_name="HyTeg", + remote="https://github.com/se-sic/hyteg-VaRA.git", + local="HyTeg", + refspec="origin/HEAD", + limit=None, + shallow=False + ), + FeatureSource() + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.EXAMPLE): [ + VCommand( + SourceRoot("HyTeg") / "build" / "apps" / "profiling" / + RSBinary('ProfilingApp'), + label='ProfilingApp' + ) + ] + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash + ) -> tp.List['ProjectBinaryWrapper']: + binaries = RevisionBinaryMap(get_local_project_git_path(HyTeg.NAME)) + + binaries.specify_binary( + "ProfilingApp", + BinaryType.EXECUTABLE, + only_valid_in=SingleRevision( + "f4711dadc3f61386e6ccdc704baa783253332db2" + ) + ) + + return binaries[revision] + + def compile(self) -> None: + """Compile HyTeg with irrelevant settings disabled.""" + hyteg_source = local.path(self.source_of(self.primary_source)) + + mkdir("-p", hyteg_source / "build") + + cc_compiler = bb.compiler.cc(self) + cxx_compiler = bb.compiler.cxx(self) + + with local.cwd(hyteg_source / "build"): + with local.env(CC=str(cc_compiler), CXX=str(cxx_compiler)): + bb.watch(cmake)( + "..", "-DWALBERLA_BUILD_WITH_MPI=OFF", + "-DHYTEG_BUILD_DOC=OFF" + ) + + with local.cwd(hyteg_source / "build" / "apps" / "profiling"): + bb.watch(make)("-j", get_number_of_jobs(bb_cfg())) + + def recompile(self) -> None: + """Recompiles HyTeg e.g.
after a patch has been applied.""" + hyteg_source = local.path(self.source_of(self.primary_source)) + + with local.cwd(hyteg_source / "build" / "apps" / "profiling"): + bb.watch(make)("-j", get_number_of_jobs(bb_cfg())) + + def run_tests(self) -> None: + pass diff --git a/varats/varats/projects/cpp_projects/lepton.py b/varats/varats/projects/cpp_projects/lepton.py index ec74042d0..cd6f23a73 100644 --- a/varats/varats/projects/cpp_projects/lepton.py +++ b/varats/varats/projects/cpp_projects/lepton.py @@ -59,9 +59,10 @@ def compile(self) -> None: lepton_source = local.path(self.source_of_primary) cpp_compiler = bb.compiler.cxx(self) + c_compiler = bb.compiler.cc(self) mkdir("-p", lepton_source / "build") with local.cwd(lepton_source / "build"): - with local.env(CXX=str(cpp_compiler)): + with local.env(CC=str(c_compiler), CXX=str(cpp_compiler)): bb.watch(cmake)("..") bb.watch(make)("-j8") diff --git a/varats/varats/projects/perf_tests/feature_perf_cs_collection.py b/varats/varats/projects/perf_tests/feature_perf_cs_collection.py index cea24265b..4d3f7e143 100644 --- a/varats/varats/projects/perf_tests/feature_perf_cs_collection.py +++ b/varats/varats/projects/perf_tests/feature_perf_cs_collection.py @@ -4,12 +4,17 @@ import benchbuild as bb from benchbuild.command import Command, SourceRoot, WorkloadSet +from benchbuild.source import HTTPMultiple from benchbuild.utils.cmd import make, cmake, mkdir from benchbuild.utils.revision_ranges import RevisionRange from benchbuild.utils.settings import get_number_of_jobs from plumbum import local -from varats.experiment.workload_util import RSBinary, WorkloadCategory +from varats.experiment.workload_util import ( + RSBinary, + WorkloadCategory, + ConfigParams, +) from varats.paper.paper_config import project_filter_generator from varats.project.project_domain import ProjectDomains from varats.project.project_util import ( @@ -19,6 +24,7 @@ verify_binaries, ) from varats.project.sources import FeatureSource +from 
varats.project.varats_command import VCommand from varats.project.varats_project import VProject from varats.utils.git_commands import init_all_submodules, update_all_submodules from varats.utils.git_util import RevisionBinaryMap, ShortCommitHash @@ -394,3 +400,588 @@ def compile(self) -> None: def recompile(self) -> None: """Recompile the project.""" _do_feature_perf_cs_collection_recompile(self) + + +class SynthIPRuntime(VProject): + """Synthetic case-study project for testing flow sensitivity.""" + + NAME = 'SynthIPRuntime' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="SynthIPRuntime", + refspec="origin/HEAD", + limit=None, + shallow=False, + version_filter=project_filter_generator("SynthIPRuntime") + ), + FeatureSource(), + HTTPMultiple( + local="geo-maps", + remote={ + "1.0": + "https://github.com/simonepri/geo-maps/releases/" + "download/v0.6.0" + }, + files=["countries-land-1km.geo.json", "countries-land-1m.geo.json"] + ) + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.SMALL): [ + VCommand( + SourceRoot("SynthIPRuntime") / RSBinary("Runtime"), + ConfigParams("-c"), + label="countries-land-1km", + creates=[ + SourceRoot("geo-maps") / + "countries-land-1km.geo.json.compressed" + ], + requires_all_args={"-c"}, + redirect_stdin=SourceRoot("geo-maps") / + "countries-land-1km.geo.json", + redirect_stdout=SourceRoot("geo-maps") / + "countries-land-1km.geo.json.compressed" + ) + ], + WorkloadSet(WorkloadCategory.MEDIUM): [ + VCommand( + SourceRoot("SynthIPRuntime") / RSBinary("Runtime"), + ConfigParams("-c"), + label="countries-land-1km", + creates=[ + SourceRoot("geo-maps") / + "countries-land-1m.geo.json.compressed" + ], + requires_all_args={"-c"}, + redirect_stdin=SourceRoot("geo-maps") / + "countries-land-1m.geo.json", + redirect_stdout=SourceRoot("geo-maps") / + "countries-land-1m.geo.json.compressed" + ) + ], + } + + @staticmethod + def 
binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + return RevisionBinaryMap( + get_local_project_git_path(SynthIPRuntime.NAME) + ).specify_binary( + "build/bin/Runtime", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("4151c42ffe", "master") + )[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHIPRUNTIME" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self) + + +class SynthIPTemplate(VProject): + """Synthetic case-study project for testing flow sensitivity.""" + + NAME = 'SynthIPTemplate' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="SynthIPTemplate", + refspec="origin/HEAD", + limit=None, + shallow=False, + version_filter=project_filter_generator("SynthIPTemplate") + ), + FeatureSource(), + HTTPMultiple( + local="geo-maps", + remote={ + "1.0": + "https://github.com/simonepri/geo-maps/releases/" + "download/v0.6.0" + }, + files=["countries-land-1km.geo.json", "countries-land-1m.geo.json"] + ) + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.SMALL): [ + VCommand( + SourceRoot("SynthIPTemplate") / RSBinary("Template"), + label="countries-land-1km", + creates=[ + SourceRoot("geo-maps") / + "countries-land-1km.geo.json.compressed" + ], + requires_all_patch={"Compress"}, + redirect_stdin=SourceRoot("geo-maps") / + "countries-land-1km.geo.json", + redirect_stdout=SourceRoot("geo-maps") / + "countries-land-1km.geo.json.compressed" + ) + ], + WorkloadSet(WorkloadCategory.MEDIUM): [ + VCommand( + SourceRoot("SynthIPTemplate") / RSBinary("Template"), + label="countries-land-1km", + creates=[ + SourceRoot("geo-maps") / + "countries-land-1m.geo.json.compressed" + ], + 
requires_all_patch={"Compress"}, + redirect_stdin=SourceRoot("geo-maps") / + "countries-land-1m.geo.json", + redirect_stdout=SourceRoot("geo-maps") / + "countries-land-1m.geo.json.compressed" + ) + ], + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + return RevisionBinaryMap( + get_local_project_git_path(SynthIPTemplate.NAME) + ).specify_binary( + "build/bin/Template", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("4151c42ffe", "master") + )[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHIPTEMPLATE" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self) + + +class SynthIPTemplate2(VProject): + """Synthetic case-study project for testing flow sensitivity.""" + + NAME = 'SynthIPTemplate2' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="SynthIPTemplate2", + refspec="origin/HEAD", + limit=None, + shallow=False, + version_filter=project_filter_generator("SynthIPTemplate2") + ), + FeatureSource(), + HTTPMultiple( + local="geo-maps", + remote={ + "1.0": + "https://github.com/simonepri/geo-maps/releases/" + "download/v0.6.0" + }, + files=["countries-land-1km.geo.json", "countries-land-1m.geo.json"] + ) + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.SMALL): [ + VCommand( + SourceRoot("SynthIPTemplate2") / RSBinary("Template2"), + label="countries-land-1km", + creates=[ + SourceRoot("geo-maps") / + "countries-land-1km.geo.json.compressed" + ], + requires_all_patch={"Compress"}, + redirect_stdin=SourceRoot("geo-maps") / + "countries-land-1km.geo.json", + redirect_stdout=SourceRoot("geo-maps") / + "countries-land-1km.geo.json.compressed" + ) + ], + 
WorkloadSet(WorkloadCategory.MEDIUM): [ + VCommand( + SourceRoot("SynthIPTemplate2") / RSBinary("Template2"), + label="countries-land-1km", + creates=[ + SourceRoot("geo-maps") / + "countries-land-1m.geo.json.compressed" + ], + requires_all_patch={"Compress"}, + redirect_stdin=SourceRoot("geo-maps") / + "countries-land-1m.geo.json", + redirect_stdout=SourceRoot("geo-maps") / + "countries-land-1m.geo.json.compressed" + ) + ], + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + return RevisionBinaryMap( + get_local_project_git_path(SynthIPTemplate2.NAME) + ).specify_binary( + "build/bin/Template2", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("4151c42ffe", "master") + )[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHIPTEMPLATE2" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self) + + +class SynthIPCombined(VProject): + """Synthetic case-study project for testing flow sensitivity.""" + + NAME = 'SynthIPCombined' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="SynthIPCombined", + refspec="origin/HEAD", + limit=None, + shallow=False, + version_filter=project_filter_generator("SynthIPCombined") + ), + FeatureSource(), + HTTPMultiple( + local="geo-maps", + remote={ + "1.0": + "https://github.com/simonepri/geo-maps/releases/" + "download/v0.6.0" + }, + files=["countries-land-1km.geo.json", "countries-land-1m.geo.json"] + ) + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.SMALL): [ + VCommand( + SourceRoot("SynthIPCombined") / RSBinary("Combined"), + ConfigParams("-c"), + label="countries-land-1km", + creates=[ + SourceRoot("geo-maps") / + 
"countries-land-1km.geo.json.compressed" + ], + requires_all_args={"-c"}, + redirect_stdin=SourceRoot("geo-maps") / + "countries-land-1km.geo.json", + redirect_stdout=SourceRoot("geo-maps") / + "countries-land-1km.geo.json.compressed" + ) + ], + WorkloadSet(WorkloadCategory.MEDIUM): [ + VCommand( + SourceRoot("SynthIPCombined") / RSBinary("Combined"), + ConfigParams("-c"), + label="countries-land-1km", + creates=[ + SourceRoot("geo-maps") / + "countries-land-1m.geo.json.compressed" + ], + requires_all_args={"-c"}, + redirect_stdin=SourceRoot("geo-maps") / + "countries-land-1m.geo.json", + redirect_stdout=SourceRoot("geo-maps") / + "countries-land-1m.geo.json.compressed" + ) + ], + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + return RevisionBinaryMap( + get_local_project_git_path(SynthIPCombined.NAME) + ).specify_binary( + "build/bin/Combined", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("4151c42ffe", "master") + )[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHIPCOMBINED" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self) + + +class SynthCTTraitBased(VProject): + """Synthetic case-study project for testing flow sensitivity.""" + + NAME = 'SynthCTTraitBased' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="SynthCTTraitBased", + refspec="origin/master", + limit=None, + shallow=False, + version_filter=project_filter_generator("SynthCTTraitBased") + ), + FeatureSource() + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.EXAMPLE): [ + VCommand( + SourceRoot("SynthCTTraitBased") / RSBinary("CTTraitBased"), + label="CompileTime-TraitBased" + ) + ] + } + 
+ @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + binary_map = RevisionBinaryMap( + get_local_project_git_path(SynthCTTraitBased.NAME) + ) + + binary_map.specify_binary( + "build/bin/CTTraitBased", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("6d50a6efd5", "HEAD") + ) + + return binary_map[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHCTTRAITBASED" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self) + + +class SynthCTPolicies(VProject): + """Synthetic case-study project for compile time variability using + policies.""" + + NAME = 'SynthCTPolicies' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="SynthCTPolicies", + refspec="origin/master", + limit=None, + shallow=False, + version_filter=project_filter_generator("SynthCTPolicies") + ), + FeatureSource() + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.EXAMPLE): [ + VCommand( + SourceRoot("SynthCTPolicies") / RSBinary("CTPolicies"), + label="CompileTime-Policies" + ) + ] + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + binary_map = RevisionBinaryMap( + get_local_project_git_path(SynthCTPolicies.NAME) + ) + + binary_map.specify_binary( + "build/bin/CTPolicies", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("6d50a6efd5", "HEAD") + ) + + return binary_map[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHCTPOLICIES" + ) + + def recompile(self) -> None: + """Recompile the 
project.""" + _do_feature_perf_cs_collection_recompile(self) + + +class SynthCTCRTP(VProject): + """Synthetic case-study project for compile time variability using CRTP.""" + + NAME = 'SynthCTCRTP' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local=NAME, + refspec="origin/master", + limit=None, + shallow=False, + version_filter=project_filter_generator(NAME) + ), + FeatureSource() + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.EXAMPLE): [ + VCommand( + SourceRoot(NAME) / RSBinary("CTCRTP"), label="CompileTime-CRTP" + ) + ] + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + binary_map = RevisionBinaryMap( + get_local_project_git_path(SynthCTCRTP.NAME) + ) + + binary_map.specify_binary( + "build/bin/CTCRTP", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("6d50a6efd5", "HEAD") + ) + + return binary_map[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHCTCRTP" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self) + + +class SynthCTTemplateSpecialization(VProject): + """Synthetic case-study project for compile time variability using template + specialization.""" + + NAME = 'SynthCTTemplateSpecialization' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local=NAME, + refspec="origin/master", + limit=None, + shallow=False, + version_filter=project_filter_generator(NAME) + ), + FeatureSource() + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.EXAMPLE): [ + VCommand( + SourceRoot(NAME) / RSBinary("CTTemplateSpecialization"), + 
label="CompileTime-Template-Specialization" + ) + ] + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + binary_map = RevisionBinaryMap( + get_local_project_git_path(SynthCTTemplateSpecialization.NAME) + ) + + binary_map.specify_binary( + "build/bin/CTTemplateSpecialization", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("6d50a6efd5", "HEAD") + ) + + return binary_map[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHCTSPECIALIZATION" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self) diff --git a/varats/varats/tables/time_workloads.py b/varats/varats/tables/time_workloads.py index 4120c5210..234e8254a 100644 --- a/varats/varats/tables/time_workloads.py +++ b/varats/varats/tables/time_workloads.py @@ -95,7 +95,11 @@ def wall_clock_time_in_msecs( kwargs["column_format"] = "llr|rr|r|r" return dataframe_to_table( - df, table_format, wrap_table, wrap_landscape=True, **kwargs + df, + table_format, + wrap_table=wrap_table, + wrap_landscape=True, + **kwargs ) diff --git a/varats/varats/tools/bb_config.py b/varats/varats/tools/bb_config.py index 294856346..482f8ae02 100644 --- a/varats/varats/tools/bb_config.py +++ b/varats/varats/tools/bb_config.py @@ -61,6 +61,7 @@ def update_projects( 'varats.projects.c_projects.picosat', 'varats.projects.c_projects.qemu', 'varats.projects.c_projects.redis', + 'varats.projects.c_projects.tig', 'varats.projects.c_projects.tmux', 'varats.projects.c_projects.vim', 'varats.projects.c_projects.x264', @@ -73,7 +74,9 @@ def update_projects( 'varats.projects.cpp_projects.poppler', 'varats.projects.cpp_projects.z3', 'varats.projects.cpp_projects.ect', - 'varats.projects.cpp_projects.lepton' + 'varats.projects.cpp_projects.lepton', + 
'varats.projects.cpp_projects.hyteg', + 'varats.projects.cpp_projects.dune' ] projects_conf.value[:] += [ 'varats.projects.cpp_projects.doxygen', 'varats.projects.cpp_projects' @@ -179,27 +182,25 @@ def create_new_bb_config( # Set paths to defaults bb_root = str(varats_cfg["benchbuild_root"]) - new_bb_cfg["build_dir"] = s.ConfigPath(os.path.join(bb_root, "results")) - new_bb_cfg["tmp_dir"] = s.ConfigPath(os.path.join(bb_root, "tmp")) - new_bb_cfg["slurm"]["node_dir"] = s.ConfigPath( - os.path.join(bb_root, "results") + new_bb_cfg["build_dir"] = os.path.join(bb_root, "results") + new_bb_cfg["tmp_dir"] = os.path.join(bb_root, "tmp") + new_bb_cfg["slurm"]["node_dir"] = os.path.join(bb_root, "results") + new_bb_cfg["slurm"]["logs"] = os.path.join(bb_root, "slurm_logs") + new_bb_cfg["container"]["root"] = os.path.join(bb_root, "containers", "lib") + new_bb_cfg["container"]["runroot"] = os.path.join( + bb_root, "containers", "run" ) - new_bb_cfg["slurm"]["logs"] = s.ConfigPath( - os.path.join(bb_root, "slurm_logs") + new_bb_cfg["container"]["export"] = os.path.join( + bb_root, "containers", "export" ) - new_bb_cfg["container"]["root"] = s.ConfigPath( - os.path.join(bb_root, "containers", "lib") - ) - new_bb_cfg["container"]["runroot"] = s.ConfigPath( - os.path.join(bb_root, "containers", "run") - ) - new_bb_cfg["container"]["export"] = s.ConfigPath( - os.path.join(bb_root, "containers", "export") - ) - new_bb_cfg["container"]["import"] = s.ConfigPath( - os.path.join(bb_root, "containers", "export") + new_bb_cfg["container"]["import"] = os.path.join( + bb_root, "containers", "export" ) new_bb_cfg["container"]["source"] = None + new_bb_cfg["container"]["storage_driver"] = "overlay" + new_bb_cfg["container"]["storage_opts"] = [ + "mount_program=/usr/bin/fuse-overlayfs" + ] # will be set correctly when saved new_bb_cfg["config_file"] = None diff --git a/varats/varats/tools/driver_build_setup.py b/varats/varats/tools/driver_build_setup.py index 6023d9104..78e2dbbe1 100644 --- 
a/varats/varats/tools/driver_build_setup.py +++ b/varats/varats/tools/driver_build_setup.py @@ -270,6 +270,7 @@ def _build_in_container( install_mount = 'tools/' click.echo("Preparing container image.") + bb_cfg() # Ensure that BB config is loaded image_name = create_dev_image(image_base, build_type) source_mount = str(StageBuilder.varats_root / source_mount) diff --git a/varats/varats/tools/driver_run.py b/varats/varats/tools/driver_run.py index 854b31c9f..7c6ac4e0e 100644 --- a/varats/varats/tools/driver_run.py +++ b/varats/varats/tools/driver_run.py @@ -90,6 +90,9 @@ def __validate_project_parameters( @click.option( "--container", is_flag=True, help="Run experiments in a container." ) +@click.option( + "--debug", is_flag=True, help="Run container in an interactive debug mode." +) @click.option( "-E", "--experiment", @@ -104,6 +107,7 @@ def main( slurm: bool, submit: bool, container: bool, + debug: bool, experiment: tp.List[tp.Type['VersionExperiment']], projects: tp.List[str], pretend: bool, @@ -145,6 +149,9 @@ def main( bb_extra_args.append("--import") else: bb_command_args.append("container") + if debug: + bb_extra_args.append("--debug") + bb_extra_args.append("--interactive") if not slurm: bb_command_args.append("run") diff --git a/varats/varats/ts_utils/__init__.py b/varats/varats/ts_utils/__init__.py new file mode 100644 index 000000000..e69de29bb