diff --git a/docs/source/conf.py b/docs/source/conf.py index ef57a97cf..54392b0f2 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -87,6 +87,10 @@ # modules that require this module before setting the type checking flag. import scipy.stats # isort:skip +# Matplotlib >=3.8 has a type-checking-flag-guarded import of a symbol that does +# not exist in the shipped version. +import matplotlib.pyplot # isort:skip + # The autodocs typehints plugin does not resolve circular imports caused by type # annotations, so we have to manually break the circles. import rich.console # isort:skip diff --git a/docs/source/tutorials/configuration_specific_experiments.rst b/docs/source/tutorials/configuration_specific_experiments.rst index 16479538c..77f804b6a 100644 --- a/docs/source/tutorials/configuration_specific_experiments.rst +++ b/docs/source/tutorials/configuration_specific_experiments.rst @@ -21,6 +21,7 @@ One just needs to extend the case-study file of a project with a yaml document t .. code-block:: yaml --- + config_type: PlainCommandlineConfiguration 0: '["--foo", "--bar"]' 1: '["--foo"]' ... diff --git a/docs/source/vara-ts-api/tools/vara-cs-gui.rst b/docs/source/vara-ts-api/tools/vara-cs-gui.rst index 96c3493a7..dcdd6219c 100644 --- a/docs/source/vara-ts-api/tools/vara-cs-gui.rst +++ b/docs/source/vara-ts-api/tools/vara-cs-gui.rst @@ -10,6 +10,7 @@ The gui is started by:: The gui provides 3 Strategies to generate case studies: - Manual revision selection: Select revision from the revision history of a project. Multiple revisions can be selected by holding `ctrl` and ranges by holding `shift`. Revisions which are blocked because of bugs in the compilation of the project are marked blue. + .. figure:: vara-cs-gui-manual.png - Random Sampling: Sample a number of revisions using a random a Normal or HalfNormal Distribution. diff --git a/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthIPTemplate_0.case_study b/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthIPTemplate_0.case_study new file mode 100644 index 000000000..28324d99a --- /dev/null +++ b/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthIPTemplate_0.case_study @@ -0,0 +1,25 @@ +--- +DocType: CaseStudy +Version: 1 +... +--- +project_name: SynthIPTemplate +stages: +- revisions: + - commit_hash: 793035062810ea3a2d9a10f831cd199fbbb82090 + commit_id: 64 + config_ids: + - 0 + - 1 + - 2 + - 3 + - 4 +version: 0 +... +--- +config_type: PatchConfiguration +0: '["Decompress"]' +1: '["Compress"]' +2: '["Compress", "fastmode", "no_smallmode"]' +3: '["Compress", "no_fastmode", "smallmode"]' +4: '["Compress", "fastmode", "smallmode"]' diff --git a/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthSAContextSensitivity_0.case_study b/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthSAContextSensitivity_0.case_study index 2a872480b..dd8b44c42 100644 --- a/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthSAContextSensitivity_0.case_study +++ b/tests/TEST_INPUTS/paper_configs/test_config_ids/SynthSAContextSensitivity_0.case_study @@ -14,6 +14,7 @@ stages: version: 0 ... --- +config_type: PlainCommandlineConfiguration 0: '["--compress", "--mem", "10", "8"]' 1: '["--compress", "--mem", "300", "8"]' ... 
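The trailing YAML document of a case-study file is now tagged with a `config_type` key so that the loader can pick the matching Configuration class. Below is a minimal, self-contained sketch of that dispatch; the helper name is hypothetical, the real code path is `create_configuration_map_from_yaml_doc` / `load_configuration_map_from_case_study_file` further down in this patch.

    import json
    import typing as tp

    def parse_tagged_config_doc(
        yaml_doc: tp.Dict[tp.Any, tp.Any], expected_type: str
    ) -> tp.Dict[int, tp.List[str]]:
        """Hypothetical stand-in: keep the document only if its config_type
        matches and map each config id to its parsed option list."""
        doc = dict(yaml_doc)
        if doc.pop("config_type", None) != expected_type:
            return {}
        return {int(cid): json.loads(raw) for cid, raw in doc.items()}

    doc = {
        "config_type": "PlainCommandlineConfiguration",
        0: '["--compress", "--mem", "10", "8"]',
        1: '["--compress", "--mem", "300", "8"]',
    }
    assert parse_tagged_config_doc(
        doc, "PlainCommandlineConfiguration"
    )[1] == ["--compress", "--mem", "300", "8"]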
diff --git a/tests/TEST_INPUTS/paper_configs/test_config_ids/xz_0.case_study b/tests/TEST_INPUTS/paper_configs/test_config_ids/xz_0.case_study index e1101d9be..a544fbfbc 100644 --- a/tests/TEST_INPUTS/paper_configs/test_config_ids/xz_0.case_study +++ b/tests/TEST_INPUTS/paper_configs/test_config_ids/xz_0.case_study @@ -10,5 +10,6 @@ stages: config_ids: [1] version: 0 --- +config_type: PlainCommandlineConfiguration 0: '["--foo", "--bar"]' 1: '["--foo"]' \ No newline at end of file diff --git a/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/8380144f-9a25-44c6-8ce0-08d0a29c677b_config-1_success.zip b/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/8380144f-9a25-44c6-8ce0-08d0a29c677b_config-1_success.zip new file mode 100644 index 000000000..df6194dc1 Binary files /dev/null and b/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/8380144f-9a25-44c6-8ce0-08d0a29c677b_config-1_success.zip differ diff --git a/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/b24ee2c1-fc85-47ba-abbd-90c98e88a37c_config-0_success.zip b/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/b24ee2c1-fc85-47ba-abbd-90c98e88a37c_config-0_success.zip new file mode 100644 index 000000000..02e155887 Binary files /dev/null and b/tests/TEST_INPUTS/results/SynthSAContextSensitivity/BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/b24ee2c1-fc85-47ba-abbd-90c98e88a37c_config-0_success.zip differ diff --git a/tests/data/test_report.py b/tests/data/test_report.py index fdb8d1196..f9e5cf9ab 100644 --- a/tests/data/test_report.py +++ b/tests/data/test_report.py @@ -156,6 +156,14 @@ def test_get_uuid(self): self.assertEqual(self.report_filename.uuid, self.correct_UUID) self.assertRaises(ValueError, lambda: self.broken_report_filename.uuid) + def test_experiment_shorthand_parsing_with_path_in_name(self) -> None: + """Checks that we correctly parse the experiment shorthand also in cases + where we have a path as part of the filename.""" + prefixed = ReportFilename( + "/tmp/foobar/" + self.report_filename.filename + ) + self.assertEqual(prefixed.experiment_shorthand, "CRE") + class TestConfigReportFilename(unittest.TestCase): """Test configuration specific ReportFilename functionality.""" diff --git a/tests/experiment/test_workload_util.py b/tests/experiment/test_workload_util.py index 729dedc24..e98e56d00 100644 --- a/tests/experiment/test_workload_util.py +++ b/tests/experiment/test_workload_util.py @@ -6,8 +6,14 @@ from benchbuild.source.base import Revision, Variant import varats.experiment.workload_util as wu +from tests.helper_utils import run_in_test_environment, UnitTestFixtures +from varats.paper.paper_config import load_paper_config from varats.projects.c_projects.xz import Xz +from varats.projects.perf_tests.feature_perf_cs_collection import ( + SynthIPTemplate, +) from varats.utils.git_util import ShortCommitHash +from varats.utils.settings import vara_cfg TT = PathToken.make_token(RootRenderer()) @@ -52,6 +58,51 @@ def test_workload_commands_tags_selected(self) -> None: ) self.assertEqual(len(commands), 1) + def test_workload_commands_requires(self) -> None: + revision = Revision(Xz, Variant(Xz.SOURCE[0], "c5c7ceb08a")) + project = Xz(revision=revision) + binary = 
Xz.binaries_for_revision(ShortCommitHash("c5c7ceb08a"))[0] + + commands = wu.workload_commands( + project, binary, [wu.WorkloadCategory.EXAMPLE] + ) + self.assertEqual(len(commands), 1) + commands = wu.workload_commands( + project, binary, [wu.WorkloadCategory.MEDIUM] + ) + self.assertEqual(len(commands), 1) + + @run_in_test_environment(UnitTestFixtures.PAPER_CONFIGS) + def test_workload_commands_requires_patch(self) -> None: + vara_cfg()['paper_config']['current_config'] = "test_config_ids" + load_paper_config() + + revision = Revision( + SynthIPTemplate, Variant(SynthIPTemplate.SOURCE[0], "7930350628"), + Variant(SynthIPTemplate.SOURCE[1], "1") + ) + project = SynthIPTemplate(revision=revision) + binary = SynthIPTemplate.binaries_for_revision( + ShortCommitHash("7930350628") + )[0] + workloads = wu.workload_commands(project, binary, []) + self.assertEqual(len(workloads), 2) + + @run_in_test_environment(UnitTestFixtures.PAPER_CONFIGS) + def test_workload_commands_requires_patch2(self) -> None: + vara_cfg()['paper_config']['current_config'] = "test_config_ids" + load_paper_config() + + revision = Revision( + SynthIPTemplate, Variant(SynthIPTemplate.SOURCE[0], "7930350628"), + Variant(SynthIPTemplate.SOURCE[1], "0") + ) + project = SynthIPTemplate(revision=revision) + binary = SynthIPTemplate \ + .binaries_for_revision(ShortCommitHash("7930350628"))[0] + workloads = wu.workload_commands(project, binary, []) + self.assertEqual(len(workloads), 0) + class TestWorkloadFilenames(unittest.TestCase): diff --git a/tests/paper/test_case_study.py b/tests/paper/test_case_study.py index 6d8412ee9..6a18fbbf3 100644 --- a/tests/paper/test_case_study.py +++ b/tests/paper/test_case_study.py @@ -48,6 +48,7 @@ commit_id: 494 ... --- +config_type: ConfigurationImpl 0: '{"foo": true, "bar": false, "bazz": "bazz-value", "buzz": "None"}' 1: '{}' 2: '{}' diff --git a/tests/paper_mgmt/test_case_study.py b/tests/paper_mgmt/test_case_study.py index 01e22ab56..7a70b3325 100644 --- a/tests/paper_mgmt/test_case_study.py +++ b/tests/paper_mgmt/test_case_study.py @@ -239,7 +239,8 @@ def test_get_newest_result_files_for_case_study_with_empty_res_dir( UnitTestFixtures.PAPER_CONFIGS, UnitTestFixtures.RESULT_FILES ) def test_get_newest_result_files_for_case_study_with_config(self) -> None: - """Check that when we have two files, the newes one get's selected.""" + """Check that when we have two files that differ in their config id, + both get selected.""" vara_cfg()['paper_config']['current_config'] = "test_config_ids" load_paper_config() @@ -273,7 +274,56 @@ def test_get_newest_result_files_for_case_study_with_config(self) -> None: self.assertEqual(newest_res_filenames[0].config_id, 0) self.assertEqual(newest_res_filenames[1].config_id, 1) - self.assertEqual(len(newest_res_filenames), 2) + self.assertEqual(newest_res_filenames[2].config_id, 0) + self.assertEqual(newest_res_filenames[3].config_id, 1) + self.assertEqual(len(newest_res_filenames), 4) + + @run_in_test_environment( + UnitTestFixtures.PAPER_CONFIGS, UnitTestFixtures.RESULT_FILES + ) + def test_get_newest_result_files_for_case_study_with_diff_exp(self) -> None: + """Check that when we have two files that differ in their experiment + shorthand, both get selected.""" + vara_cfg()['paper_config']['current_config'] = "test_config_ids" + load_paper_config() + + config_0_file = ReportFilename( + "BBBase-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/" + "b24ee2c1-fc85-47ba-abbd-90c98e88a37c_config-0_success.zip" + ) + config_1_file = ReportFilename( + 
"BBBaseO-CR-SynthSAContextSensitivity-ContextSense-06eac0edb6/" + "b24ee2c1-fc85-47ba-abbd-90c98e88a37c_config-0_success.zip" + ) + + now = datetime.now().timestamp() + file_path_0 = Path( + str(vara_cfg()['result_dir']) + ) / 'SynthSAContextSensitivity' / config_0_file.filename + os.utime(file_path_0, (now, now)) + + file_path_1 = Path( + str(vara_cfg()['result_dir']) + ) / 'SynthSAContextSensitivity' / config_1_file.filename + os.utime(file_path_1, (now, now)) + + newest_res_files = MCS.get_newest_result_files_for_case_study( + get_paper_config().get_case_studies('SynthSAContextSensitivity')[0], + Path(vara_cfg()['result_dir'].value), CR + ) + + newest_res_files.sort(reverse=True) + newest_res_filenames = [ReportFilename(x) for x in newest_res_files] + + self.assertEqual( + newest_res_filenames[0].experiment_shorthand, "BBBaseO" + ) + self.assertEqual( + newest_res_filenames[1].experiment_shorthand, "BBBaseO" + ) + self.assertEqual(newest_res_filenames[2].experiment_shorthand, "BBBase") + self.assertEqual(newest_res_filenames[3].experiment_shorthand, "BBBase") + self.assertEqual(len(newest_res_filenames), 4) def test_get_case_study_file_name_filter_empty(self) -> None: """Check that we correctly handle case study filter generation even if diff --git a/tests/provider/test_patch_provider.py b/tests/provider/test_patch_provider.py index 9acebb32e..9b55604e7 100644 --- a/tests/provider/test_patch_provider.py +++ b/tests/provider/test_patch_provider.py @@ -184,57 +184,120 @@ def setUpClass(cls) -> None: "Test-ABCD", "", path=Path("test.patch"), - tags={"A", "B", "C", "D"} + tags={"A", "B", "C", "D"}, + feature_tags={"F_A", "F_B", "F_C", "F_D"} ), - Patch("TEST", "Test-A", "", path=Path("test.patch"), tags={"A"}), - Patch("TEST", "Test-B", "", path=Path("test.patch"), tags={"B"}), - Patch("TEST", "Test-C", "", path=Path("test.patch"), tags={"C"}), - Patch("TEST", "Test-D", "", path=Path("test.patch"), tags={"D"}), Patch( - "TEST", "Test-AB", "", path=Path("test.patch"), tags={"A", "B"} + "TEST", + "Test-A", + "", + path=Path("test.patch"), + tags={"A"}, + feature_tags={"F_A"} + ), + Patch( + "TEST", + "Test-B", + "", + path=Path("test.patch"), + tags={"B"}, + feature_tags={"F_B"} + ), + Patch( + "TEST", + "Test-C", + "", + path=Path("test.patch"), + tags={"C"}, + feature_tags={"F_C"} ), Patch( - "TEST", "Test-AC", "", path=Path("test.patch"), tags={"A", "C"} + "TEST", + "Test-D", + "", + path=Path("test.patch"), + tags={"D"}, + feature_tags={"F_D"} + ), + Patch( + "TEST", + "Test-AB", + "", + path=Path("test.patch"), + tags={"A", "B"}, + feature_tags={"F_A", "F_B"} + ), + Patch( + "TEST", + "Test-AC", + "", + path=Path("test.patch"), + tags={"A", "C"}, + feature_tags={"F_A", "F_C"} ), Patch( - "TEST", "Test-AD", "", path=Path("test.patch"), tags={"A", "D"} + "TEST", + "Test-AD", + "", + path=Path("test.patch"), + tags={"A", "D"}, + feature_tags={"F_A", "F_D"} ), Patch( - "TEST", "Test-BC", "", path=Path("test.patch"), tags={"B", "C"} + "TEST", + "Test-BC", + "", + path=Path("test.patch"), + tags={"B", "C"}, + feature_tags={"F_B", "F_C"} ), Patch( - "TEST", "Test-BD", "", path=Path("test.patch"), tags={"B", "D"} + "TEST", + "Test-BD", + "", + path=Path("test.patch"), + tags={"B", "D"}, + feature_tags={"F_B", "F_D"} ), Patch( - "TEST", "Test-CD", "", path=Path("test.patch"), tags={"C", "D"} + "TEST", + "Test-CD", + "", + path=Path("test.patch"), + tags={"C", "D"}, + feature_tags={"F_C", "F_D"} ), Patch( "TEST", "Test-ABC", "", path=Path("test.patch"), - tags={"A", "B", "C"} + tags={"A", "B", 
"C"}, + feature_tags={"F_A", "F_B", "F_C"} ), Patch( "TEST", "Test-ABD", "", path=Path("test.patch"), - tags={"A", "B", "D"} + tags={"A", "B", "D"}, + feature_tags={"F_A", "F_B", "F_D"} ), Patch( "TEST", "Test-ACD", "", path=Path("test.patch"), - tags={"A", "C", "D"} + tags={"A", "C", "D"}, + feature_tags={"F_A", "F_C", "F_D"} ), Patch( "TEST", "Test-BCD", "", path=Path("test.patch"), - tags={"B", "C", "D"} + tags={"B", "C", "D"}, + feature_tags={"F_B", "F_C", "F_D"} ), } @@ -311,6 +374,38 @@ def test_any_of_multiple_tags(self): for patch in patches: any([tag in patch.tags for tag in tags]) + def test_all_of_single_feature_tag(self): + for tag in {"F_A", "F_B", "F_C", "F_D"}: + patches = self.patchSet.all_of_features([tag]) + self.assertEqual(8, len(patches)) + + def test_all_of_multiple_feature_tags(self): + tags_count = {("F_A", "F_B"): 4, + ("F_C", "F_B"): 4, + ("F_D", "F_B"): 4, + ("F_A", "F_B", "F_C"): 2, + ("F_A", "F_B", "F_C", "F_D"): 1} + + for tags in tags_count: + patches = self.patchSet.all_of_features(tags) + self.assertEqual(tags_count[tags], len(patches)) + + def test_any_of_single_feature_tag(self): + for tag in {"F_A", "F_B", "F_C", "F_D"}: + patches = self.patchSet.any_of_features([tag]) + self.assertEqual(8, len(patches)) + + def test_any_of_multiple_feature_tags(self): + tags_count = {("F_A", "F_B"): 12, + ("F_C", "F_B"): 12, + ("F_D", "F_B"): 12, + ("F_A", "F_B", "F_C"): 14, + ("F_A", "F_B", "F_C", "F_D"): 15} + + for tags in tags_count: + patches = self.patchSet.any_of_features(tags) + self.assertEqual(tags_count[tags], len(patches)) + def test_patchset_intersection(self): patches = self.patchSet["A"] & self.patchSet["B"] diff --git a/tests/utils/test_experiment_util.py b/tests/utils/test_experiment_util.py index 90061005b..a6bc93bd1 100644 --- a/tests/utils/test_experiment_util.py +++ b/tests/utils/test_experiment_util.py @@ -24,6 +24,9 @@ from varats.project.project_util import BinaryType, ProjectBinaryWrapper from varats.project.varats_project import VProject from varats.projects.c_projects.xz import Xz +from varats.projects.perf_tests.feature_perf_cs_collection import ( + SynthIPTemplate, +) from varats.report.gnu_time_report import TimeReport from varats.report.report import FileStatusExtension, ReportSpecification from varats.utils.git_util import ShortCommitHash @@ -419,3 +422,20 @@ def test_get_extra_config_options(self) -> None: ) project = Xz(revision=revision) self.assertEqual(EU.get_extra_config_options(project), ["--foo"]) + + @run_in_test_environment(UnitTestFixtures.PAPER_CONFIGS) + def test_get_config_patches(self) -> None: + vara_cfg()['paper_config']['current_config'] = "test_config_ids" + load_paper_config() + + revision = Revision( + SynthIPTemplate, Variant(SynthIPTemplate.SOURCE[0], "7930350628"), + Variant(SynthIPTemplate.SOURCE[1], "4") + ) + project = SynthIPTemplate(revision=revision) + patches = EU.get_config_patches(project) + self.assertEqual(len(patches), 1) + self.assertEqual( + list(patches)[0].feature_tags, + ["Compress", "fastmode", "smallmode"] + ) diff --git a/tests/utils/test_git_util.py b/tests/utils/test_git_util.py index 1450add2a..c11818665 100644 --- a/tests/utils/test_git_util.py +++ b/tests/utils/test_git_util.py @@ -2,7 +2,7 @@ import unittest from pathlib import Path -from benchbuild.utils.revision_ranges import RevisionRange +from benchbuild.utils.revision_ranges import RevisionRange, SingleRevision from varats.project.project_util import ( get_local_project_git, @@ -568,6 +568,17 @@ def 
test_specification_validity_range_multiple_binaries(self) -> None: self.assertIn("SingleLocalMultipleRegions", self.rv_map) self.assertIn("SingleLocalSimple", self.rv_map) + def test_specification_single_revision(self) -> None: + """Check if we can add binaries that are only valid with a single + revision.""" + self.rv_map.specify_binary( + "build/bin/SingleLocalMultipleRegions", + BinaryType.EXECUTABLE, + only_valid_in=SingleRevision("162db88346") + ) + + self.assertIn("SingleLocalMultipleRegions", self.rv_map) + def test_specification_binaries_with_special_name(self) -> None: """Check if we can add binaries that have a special name.""" self.rv_map.specify_binary( diff --git a/varats-core/varats/base/configuration.py b/varats-core/varats/base/configuration.py index 0c667dd2e..cdf6cb8e5 100644 --- a/varats-core/varats/base/configuration.py +++ b/varats-core/varats/base/configuration.py @@ -414,3 +414,49 @@ def get_config_value(self, option_name: str) -> tp.Optional[tp.Any]: def unfreeze(self) -> Configuration: return self + + +class PatchConfiguration(Configuration): + """Configuration class for projects where configuring is done by applying a + patch.""" + + def __init__(self, patch_feature_tags: tp.Set[str]): + self.__patch_feature_tags: tp.Set[ConfigurationOption] = { + ConfigurationOptionImpl(tag, tag) for tag in patch_feature_tags + } + + @staticmethod + def create_configuration_from_str(config_str: str) -> Configuration: + patch_feature_tags = json.loads(config_str) + return PatchConfiguration(patch_feature_tags) + + def add_config_option(self, option: ConfigurationOption) -> None: + self.__patch_feature_tags.add(option) + + def set_config_option(self, option_name: str, value: tp.Any) -> None: + self.__patch_feature_tags = { + option for option in self.__patch_feature_tags + if option.name != option_name + } + self.add_config_option(ConfigurationOptionImpl(option_name, value)) + + def get_config_value(self, option_name: str) -> tp.Optional[tp.Any]: + filtered_options = filter( + lambda option: (option.name == option_name), + self.__patch_feature_tags + ) + return any(filtered_options) + + def options(self) -> tp.List[ConfigurationOption]: + return list(self.__patch_feature_tags) + + def dump_to_string(self) -> str: + return ", ".join( + map(lambda option: str(option.value), self.__patch_feature_tags) + ) + + def freeze(self) -> FrozenConfiguration: + return FrozenConfiguration(deepcopy(self)) + + def unfreeze(self) -> Configuration: + return self diff --git a/varats-core/varats/experiment/experiment_util.py b/varats-core/varats/experiment/experiment_util.py index 110782502..bad60ba6f 100644 --- a/varats-core/varats/experiment/experiment_util.py +++ b/varats-core/varats/experiment/experiment_util.py @@ -10,7 +10,6 @@ from collections import defaultdict from pathlib import Path from types import TracebackType -from typing import Protocol, runtime_checkable from benchbuild import source from benchbuild.experiment import Experiment @@ -23,11 +22,17 @@ from plumbum.commands.base import BoundCommand import varats.revision.revisions as revs -from varats.base.configuration import PlainCommandlineConfiguration +from varats.base.configuration import ( + PlainCommandlineConfiguration, + PatchConfiguration, + Configuration, +) +from varats.experiment.steps.patch import ApplyPatch from varats.paper.paper_config import get_paper_config from varats.project.project_util import ProjectBinaryWrapper from varats.project.sources import FeatureSource from varats.project.varats_project import VProject 
+from varats.provider.patch.patch_provider import PatchSet, PatchProvider from varats.report.report import ( BaseReport, FileStatusExtension, @@ -696,20 +701,12 @@ def get_current_config_id(project: VProject) -> tp.Optional[int]: return None -def get_extra_config_options(project: VProject) -> tp.List[str]: - """ - Get extra program options that were specified in the particular - configuration of \a Project. - - Args: - project: to get the extra options for - - Returns: - list of command line options as string - """ +def get_config( + project: VProject, config_type: tp.Type[Configuration] +) -> tp.Optional[Configuration]: config_id = get_current_config_id(project) if config_id is None: - return [] + return None paper_config = get_paper_config() case_studies = paper_config.get_case_studies(cs_name=project.name) @@ -722,14 +719,68 @@ def get_extra_config_options(project: VProject) -> tp.List[str]: case_study = case_studies[0] config_map = load_configuration_map_for_case_study( - paper_config, case_study, PlainCommandlineConfiguration + paper_config, case_study, config_type ) config = config_map.get_configuration(config_id) - if config is None: - raise AssertionError( - "Requested config id was not in the map, but should be" - ) + return config + + +def get_extra_config_options(project: VProject) -> tp.List[str]: + """ + Get extra program options that were specified in the particular + configuration of \a Project. + + Args: + project: to get the extra options for + Returns: + list of command line options as string + """ + config = get_config(project, PlainCommandlineConfiguration) + if not config: + return [] return list(map(lambda option: option.value, config.options())) + + +def get_config_patches(project: VProject) -> PatchSet: + """ + Get required patches for the particular configuration of \a Project. + + Args: + project: to get the patches for + + Returns: + list of patches + """ + config = get_config(project, PatchConfiguration) + if not config: + return PatchSet(set()) + + patch_provider = PatchProvider.create_provider_for_project(project) + revision = ShortCommitHash(project.revision.primary.version) + feature_tags = {opt.value for opt in config.options()} + patches = patch_provider.get_patches_for_revision(revision).all_of_features( + feature_tags + ) + + return patches + + +def get_config_patch_steps(project: VProject) -> tp.MutableSequence[Step]: + """ + Get a list of actions that apply all configuration patches to the project. + + Args: + project: the project to be configured + + Returns: + the actions that configure the project + """ + return list( + map( + lambda patch: ApplyPatch(project, patch), + get_config_patches(project) + ) + ) diff --git a/varats-core/varats/experiment/workload_util.py b/varats-core/varats/experiment/workload_util.py index 8cf66daff..38b82720f 100644 --- a/varats-core/varats/experiment/workload_util.py +++ b/varats-core/varats/experiment/workload_util.py @@ -19,7 +19,12 @@ Command, ) +from varats.experiment.experiment_util import ( + get_extra_config_options, + get_config_patches, +) from varats.project.project_util import ProjectBinaryWrapper +from varats.project.varats_command import VCommand from varats.project.varats_project import VProject from varats.report.report import KeyedReportAggregate, ReportTy from varats.utils.exceptions import auto_unwrap @@ -92,9 +97,19 @@ def workload_commands( ) ] - return list( - filter(lambda prj_cmd: prj_cmd.path.name == binary.name, project_cmds) - ) + # Filter commands that have required args and patches set. 
+ extra_options = set(get_extra_config_options(project)) + patches = get_config_patches(project) + + def filter_by_config(prj_cmd: ProjectCommand) -> bool: + if isinstance(prj_cmd.command, VCommand): + return prj_cmd.command.can_be_executed_by(extra_options, patches) + return True + + return [ + cmd for cmd in project_cmds + if cmd.path.name == binary.name and filter_by_config(cmd) + ] def create_workload_specific_filename( diff --git a/varats-core/varats/mapping/configuration_map.py b/varats-core/varats/mapping/configuration_map.py index f472c7d00..71a71122e 100644 --- a/varats-core/varats/mapping/configuration_map.py +++ b/varats-core/varats/mapping/configuration_map.py @@ -141,6 +141,7 @@ def create_configuration_map_from_yaml_doc( """ new_config_map = ConfigurationMap() + yaml_doc.pop("config_type", None) for config_id in sorted(yaml_doc): parsed_config = concrete_config_type.create_configuration_from_str( diff --git a/varats-core/varats/paper/case_study.py b/varats-core/varats/paper/case_study.py index 627e96d01..3fb087596 100644 --- a/varats-core/varats/paper/case_study.py +++ b/varats-core/varats/paper/case_study.py @@ -169,7 +169,7 @@ def get_config_ids_for_revision(self, revision: CommitHash) -> tp.List[int]: Returns a list of all configuration IDs specified for this revision. Args: - revision: i.e., a commit hash registed in this ``CSStage`` + revision: i.e., a commit hash registered in this ``CSStage`` Returns: list of config IDs """ @@ -580,11 +580,19 @@ def load_configuration_map_from_case_study_file( version_header.raise_if_not_type("CaseStudy") version_header.raise_if_version_is_less_than(1) - next(documents) # Skip case study yaml-doc + next(documents) # skip case study document + try: + while True: + document = next(documents) - return create_configuration_map_from_yaml_doc( - next(documents), concrete_config_type - ) + if document["config_type"] == concrete_config_type.__name__: + break + + return create_configuration_map_from_yaml_doc( + document, concrete_config_type + ) + except StopIteration: + return ConfigurationMap() def store_case_study(case_study: CaseStudy, case_study_location: Path) -> None: diff --git a/varats-core/varats/project/varats_command.py b/varats-core/varats/project/varats_command.py new file mode 100644 index 000000000..314a1ee55 --- /dev/null +++ b/varats-core/varats/project/varats_command.py @@ -0,0 +1,92 @@ +"""Custom version of benchbuild's Command for use with the VaRA-Tool-Suite.""" +import typing as tp + +from benchbuild.command import Command + +if tp.TYPE_CHECKING: + import varats.provider.patch.patch_provider as patch_provider + + +class VCommand(Command): # type: ignore [misc] + """ + Wrapper around benchbuild's Command class. + + Attributes: + requires_any_args: any of these command line args must be available for + successful execution. + requires_all_args: all of these command line args must be available for + successful execution. + requires_any_patch: any of these patch feature-tags must be available for + successful execution. + requires_all_patch: all of these patch feature-tags must be available for + successful execution. 
+ """ + + _requires: tp.Set[str] + + def __init__( + self, + *args: tp.Any, + requires_any_args: tp.Optional[tp.Set[str]] = None, + requires_all_args: tp.Optional[tp.Set[str]] = None, + requires_any_patch: tp.Optional[tp.Set[str]] = None, + requires_all_patch: tp.Optional[tp.Set[str]] = None, + **kwargs: tp.Union[str, tp.List[str]], + ) -> None: + + super().__init__(*args, **kwargs) + self._requires_any_args = requires_any_args or set() + self._requires_all_args = requires_all_args or set() + self._requires_any_patch = requires_any_patch or set() + self._requires_all_patch = requires_all_patch or set() + + @property + def requires_any_args(self) -> tp.Set[str]: + return self._requires_any_args + + @property + def requires_all_args(self) -> tp.Set[str]: + return self._requires_all_args + + @property + def requires_any_patch(self) -> tp.Set[str]: + return self._requires_any_patch + + @property + def requires_all_patch(self) -> tp.Set[str]: + return self._requires_all_patch + + def can_be_executed_by( + self, extra_args: tp.Set[str], + applied_patches: 'patch_provider.PatchSet' + ) -> bool: + """ + Checks whether this command can be executed with the give configuration. + + Args: + extra_args: additional command line arguments that will be passed to + the command + applied_patches: patches that were applied to create the executable + + Returns: + whether this command can be executed + """ + all_args = set(self._args).union(extra_args) + all_patch_tags: tp.Set[str] = set() + for patch in applied_patches: + if patch.feature_tags: + all_patch_tags.update(patch.feature_tags) + + return bool(( + not self.requires_any_args or + all_args.intersection(self.requires_any_args) + ) and ( + not self.requires_all_args or + self.requires_all_args.issubset(all_args) + ) and ( + not self.requires_any_patch or + all_patch_tags.intersection(self.requires_any_patch) + ) and ( + not self.requires_all_patch or + self.requires_all_patch.issubset(all_patch_tags) + )) diff --git a/varats-core/varats/provider/patch/patch_provider.py b/varats-core/varats/provider/patch/patch_provider.py index 994392c5c..27ebf31ba 100644 --- a/varats-core/varats/provider/patch/patch_provider.py +++ b/varats-core/varats/provider/patch/patch_provider.py @@ -18,6 +18,7 @@ from varats.project.project_util import get_local_project_git_path from varats.provider.provider import Provider, ProviderType +from varats.utils.filesystem_util import lock_file from varats.utils.git_commands import pull_current_branch, fetch_repository from varats.utils.git_util import ( CommitHash, @@ -37,7 +38,8 @@ def __init__( description: str, path: Path, valid_revisions: tp.Optional[tp.Set[CommitHash]] = None, - tags: tp.Optional[tp.Set[str]] = None + tags: tp.Optional[tp.Set[str]] = None, + feature_tags: tp.Optional[tp.Set[str]] = None ): self.project_name: str = project_name self.shortname: str = shortname @@ -46,9 +48,10 @@ def __init__( self.valid_revisions: tp.Set[ CommitHash] = valid_revisions if valid_revisions else set() self.tags: tp.Optional[tp.Set[str]] = tags + self.feature_tags: tp.Optional[tp.Set[str]] = feature_tags @staticmethod - def from_yaml(yaml_path: Path): + def from_yaml(yaml_path: Path) -> 'Patch': """Creates a Patch from a YAML file.""" yaml_dict = yaml.safe_load(yaml_path.read_text()) @@ -61,16 +64,17 @@ def from_yaml(yaml_path: Path): # the yaml info file. 
path = yaml_path.parent / path - tags = None - if "tags" in yaml_dict: - tags = yaml_dict["tags"] + tags = yaml_dict.get("tags") + feature_tags = yaml_dict.get("feature_tags") project_git_path = get_local_project_git_path(project_name) # Update repository to have all upstream changes fetch_repository(project_git_path) - def parse_revisions(rev_dict: tp.Dict) -> tp.Set[CommitHash]: + def parse_revisions( + rev_dict: tp.Dict[str, tp.Any] + ) -> tp.Set[CommitHash]: res: tp.Set[CommitHash] = set() if "single_revision" in rev_dict: @@ -99,10 +103,11 @@ def parse_revisions(rev_dict: tp.Dict) -> tp.Set[CommitHash]: return res + include_revisions: tp.Set[CommitHash] if "include_revisions" in yaml_dict: include_revisions = parse_revisions(yaml_dict["include_revisions"]) else: - include_revisions: tp.Set[CommitHash] = set( + include_revisions = set( get_all_revisions_between( get_initial_commit(project_git_path).hash, "", ShortCommitHash, project_git_path @@ -115,7 +120,8 @@ def parse_revisions(rev_dict: tp.Dict) -> tp.Set[CommitHash]: ) return Patch( - project_name, shortname, description, path, include_revisions, tags + project_name, shortname, description, path, include_revisions, tags, + feature_tags ) def __repr__(self) -> str: @@ -134,18 +140,21 @@ def __str__(self) -> str: return str_representation - def __hash__(self): + def __hash__(self) -> int: + hash_args = [self.shortname, self.path] if self.tags: - return hash((self.shortname, str(self.path), tuple(self.tags))) + hash_args += tuple(self.tags) + if self.feature_tags: + hash_args += tuple(self.feature_tags) - return hash((self.shortname, str(self.path))) + return hash(tuple(hash_args)) class PatchSet: """A PatchSet is a storage container for project specific patches that can easily be accessed via the tags of a patch.""" - def __init__(self, patches: tp.Set[Patch]): + def __init__(self, patches: tp.Union[tp.Set[Patch], tp.FrozenSet[Patch]]): self.__patches: tp.FrozenSet[Patch] = frozenset(patches) def __iter__(self) -> tp.Iterator[Patch]: @@ -157,7 +166,7 @@ def __contains__(self, value: tp.Any) -> bool: def __len__(self) -> int: return len(self.__patches) - def __getitem__(self, tags: tp.Union[str, tp.Iterable[str]]): + def __getitem__(self, tags: tp.Union[str, tp.Iterable[str]]) -> 'PatchSet': """ Overrides the bracket operator of a PatchSet. 
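To make the new `feature_tags` field concrete, here is a small sketch with made-up patch names and paths; real `Patch` objects are normally created through `Patch.from_yaml`.

    from pathlib import Path

    from varats.provider.patch.patch_provider import Patch, PatchSet

    # Two hypothetical patches that only differ in their feature tags.
    patch_a = Patch("TEST", "Test", "", path=Path("test.patch"),
                    feature_tags={"F_A"})
    patch_b = Patch("TEST", "Test", "", path=Path("test.patch"),
                    feature_tags={"F_B"})

    # feature_tags now feed into Patch.__hash__ (alongside shortname, path,
    # and tags), so patches that only differ in their feature tags hash
    # differently.
    assert hash(patch_a) != hash(patch_b)
    # Both patches can be stored in the same PatchSet.
    assert len(PatchSet({patch_a, patch_b})) == 2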
@@ -210,6 +219,30 @@ def all_of(self, tags: tp.Union[str, tp.Iterable[str]]) -> "PatchSet": """ return self[tags] + def any_of_features(self, feature_tags: tp.Iterable[str]) -> "PatchSet": + """Returns a patch set with patches containing at least one of the given + feature tags.""" + tag_set = set(feature_tags) + result: tp.Set[Patch] = set() + for patch in self: + if patch.feature_tags and patch.feature_tags.intersection(tag_set): + result.add(patch) + + return PatchSet(result) + + def all_of_features( + self, feature_tags: tp.Union[str, tp.Iterable[str]] + ) -> "PatchSet": + """Returns a patch set with patches containing all the given feature + tags.""" + tag_set = set(feature_tags) + result: tp.Set[Patch] = set() + for patch in self: + if patch.feature_tags and tag_set.issubset(patch.feature_tags): + result.add(patch) + + return PatchSet(result) + def __hash__(self) -> int: return hash(self.__patches) @@ -235,12 +268,10 @@ class PatchProvider(Provider): def __init__(self, project: tp.Type[Project]): super().__init__(project) - # BB only performs a fetch so our repo might be out of date - pull_current_branch(self._get_patches_repository_path()) + self._update_local_patches_repo() + repo_path = self._get_patches_repository_path() - patches_project_dir = Path( - self._get_patches_repository_path() / self.project.NAME - ) + patches_project_dir = repo_path / self.project.NAME if not patches_project_dir.is_dir(): warnings.warn( @@ -285,7 +316,7 @@ def get_patches_for_revision(self, revision: CommitHash) -> PatchSet: @classmethod def create_provider_for_project( cls: tp.Type[ProviderType], project: tp.Type[Project] - ): + ) -> 'PatchProvider': """ Creates a provider instance for the given project. @@ -301,7 +332,7 @@ def create_provider_for_project( @classmethod def create_default_provider( cls: tp.Type[ProviderType], project: tp.Type[Project] - ): + ) -> 'PatchProvider': """ Creates a default provider instance that can be used with any project. 
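The two feature-tag queries added above behave like their tag-based counterparts; a short usage sketch with made-up patches:

    from pathlib import Path

    from varats.provider.patch.patch_provider import Patch, PatchSet

    patches = PatchSet({
        Patch("TEST", "Test-A", "", path=Path("a.patch"), feature_tags={"F_A"}),
        Patch("TEST", "Test-B", "", path=Path("b.patch"), feature_tags={"F_B"}),
        Patch("TEST", "Test-AB", "", path=Path("ab.patch"),
              feature_tags={"F_A", "F_B"}),
    })

    # At least one of the given feature tags has to be present.
    assert len(patches.any_of_features(["F_A"])) == 2
    # All of the given feature tags have to be present.
    assert len(patches.all_of_features(["F_A", "F_B"])) == 1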
@@ -314,6 +345,13 @@ def create_default_provider( @classmethod def _get_patches_repository_path(cls) -> Path: - cls.patches_source.fetch() + # pathlib doesn't have type annotations for '/' + return tp.cast(Path, Path(target_prefix()) / cls.patches_source.local) + + @classmethod + def _update_local_patches_repo(cls) -> None: + lock_path = Path(target_prefix()) / "patch_provider.lock" - return Path(target_prefix()) / cls.patches_source.local + with lock_file(lock_path): + cls.patches_source.fetch() + pull_current_branch(cls._get_patches_repository_path()) diff --git a/varats-core/varats/report/report.py b/varats-core/varats/report/report.py index 0a649a7dc..ccbffcdbc 100644 --- a/varats-core/varats/report/report.py +++ b/varats-core/varats/report/report.py @@ -311,7 +311,7 @@ def experiment_shorthand(self) -> str: the experiment shorthand from a result file """ if (match := ReportFilename.__RESULT_FILE_REGEX.search(self.filename)): - return match.group("experiment_shorthand") + return match.group("experiment_shorthand").split('/')[-1] raise ValueError(f'File {self.filename} name was wrongly formatted.') diff --git a/varats-core/varats/utils/filesystem_util.py b/varats-core/varats/utils/filesystem_util.py index 6f71f01d9..258fb5e27 100644 --- a/varats-core/varats/utils/filesystem_util.py +++ b/varats-core/varats/utils/filesystem_util.py @@ -1,6 +1,8 @@ """Utility functions for handling filesystem related tasks.""" - +import fcntl +import os.path import typing as tp +from contextlib import contextmanager from pathlib import Path @@ -13,3 +15,16 @@ def __init__(self, folder: tp.Union[Path, str]) -> None: f"Folder: '{str(folder)}' should be created " "but was already present." ) + + +@contextmanager +def lock_file(lock_path: Path, + lock_mode: int = fcntl.LOCK_EX) -> tp.Generator[None, None, None]: + open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC + lock_fd = os.open(lock_path, open_mode) + try: + fcntl.flock(lock_fd, lock_mode) + yield + finally: + fcntl.flock(lock_fd, fcntl.LOCK_UN) + os.close(lock_fd) diff --git a/varats-core/varats/utils/git_util.py b/varats-core/varats/utils/git_util.py index 413ecd269..6f0cd1c1d 100644 --- a/varats-core/varats/utils/git_util.py +++ b/varats-core/varats/utils/git_util.py @@ -1063,7 +1063,9 @@ def specify_binary( override_entry_point = kwargs.get("override_entry_point", None) if override_entry_point: override_entry_point = Path(override_entry_point) - validity_range = kwargs.get("only_valid_in", None) + validity_range: AbstractRevisionRange = kwargs.get( + "only_valid_in", None + ) valid_exit_codes = kwargs.get("valid_exit_codes", None) wrapped_binary = ProjectBinaryWrapper( @@ -1072,6 +1074,7 @@ def specify_binary( ) if validity_range: + validity_range.init_cache(self.__repo_location) self.__revision_specific_mappings[validity_range].append( wrapped_binary ) @@ -1087,10 +1090,7 @@ def __getitem__(self, for validity_range, wrapped_binaries \ in self.__revision_specific_mappings.items(): - if revision in get_all_revisions_between( - validity_range.id_start, validity_range.id_end, ShortCommitHash, - self.__repo_location - ): + if revision in map(ShortCommitHash, validity_range): revision_specific_binaries.extend(wrapped_binaries) revision_specific_binaries.extend(self.__always_valid_mappings) diff --git a/varats-core/varats/utils/settings.py b/varats-core/varats/utils/settings.py index bac6860ad..e8b207cf9 100644 --- a/varats-core/varats/utils/settings.py +++ b/varats-core/varats/utils/settings.py @@ -283,7 +283,10 @@ def bb_cfg() -> s.Configuration: 
bb_cfg_path = Path(bb_root) / ".benchbuild.yml" if bb_cfg_path.exists(): BB_CFG.load(local.path(bb_cfg_path)) - BB_CFG.init_from_env() + + # Environment should always override config files + BB_CFG.init_from_env() + _BB_CFG = BB_CFG create_missing_bb_folders() return _BB_CFG diff --git a/varats/varats/data/metrics.py b/varats/varats/data/metrics.py index 78102735e..4524dc205 100644 --- a/varats/varats/data/metrics.py +++ b/varats/varats/data/metrics.py @@ -28,7 +28,7 @@ def gini_coefficient(distribution: pd.Series) -> float: Calculates the Gini coefficient of the data. For more information see online - `gini coefficient `_. + `Gini coefficient `_. Args: distribution: sorted series to calculate the Gini coefficient for @@ -141,10 +141,12 @@ class ConfusionMatrix(tp.Generic[T]): """ Helper class to automatically calculate classification results. - | Predicted Positive (PP) | Predicted Negative (PN) - --------------------|---------------------------|-------------------------- - Actual Positive (P) | True Positive (TP) | False Negative (FN) - Actual Negative (N) | False Positive (FP) | True Negative (TN) + +---------------------+-------------------------+-------------------------+ + | | Predicted Positive (PP) | Predicted Negative (PN) | + +---------------------+-------------------------+-------------------------+ + | Actual Positive (P) | True Positive (TP) | False Negative (FN) | + | Actual Negative (N) | False Positive (FP) | True Negative (TN) | + +---------------------+-------------------------+-------------------------+ Reference: https://en.wikipedia.org/wiki/Precision_and_recall """ diff --git a/varats/varats/paper_mgmt/case_study.py b/varats/varats/paper_mgmt/case_study.py index 823c7f154..556441cde 100644 --- a/varats/varats/paper_mgmt/case_study.py +++ b/varats/varats/paper_mgmt/case_study.py @@ -301,7 +301,7 @@ def get_newest_result_files_for_case_study( Returns: list of result file paths """ - files_to_store: tp.Dict[tp.Tuple[ShortCommitHash, tp.Optional[int]], + files_to_store: tp.Dict[tp.Tuple[ShortCommitHash, str, tp.Optional[int]], Path] = {} result_dir /= case_study.project_name @@ -319,16 +319,23 @@ def get_newest_result_files_for_case_study( ) if case_study.has_revision(commit_hash) and config_id_matches: - current_file = files_to_store.get((commit_hash, config_id), - None) + current_file = files_to_store.get( + (commit_hash, report_file.experiment_shorthand, config_id), + None + ) if current_file is None: - files_to_store[(commit_hash, config_id)] = opt_res_file + files_to_store[( + commit_hash, report_file.experiment_shorthand, config_id + )] = opt_res_file else: if ( current_file.stat().st_mtime < opt_res_file.stat().st_mtime ): - files_to_store[(commit_hash, config_id)] = opt_res_file + files_to_store[( + commit_hash, report_file.experiment_shorthand, + config_id + )] = opt_res_file return list(files_to_store.values()) diff --git a/varats/varats/projects/c_projects/xz.py b/varats/varats/projects/c_projects/xz.py index 1fac7c349..3d1a580ed 100644 --- a/varats/varats/projects/c_projects/xz.py +++ b/varats/varats/projects/c_projects/xz.py @@ -2,7 +2,7 @@ import typing as tp import benchbuild as bb -from benchbuild.command import Command, SourceRoot, WorkloadSet +from benchbuild.command import SourceRoot, WorkloadSet from benchbuild.source import HTTPMultiple from benchbuild.utils.cmd import autoreconf, make from benchbuild.utils.revision_ranges import ( @@ -24,6 +24,7 @@ verify_binaries, ) from varats.project.sources import FeatureSource +from varats.project.varats_command 
import VCommand from varats.project.varats_project import VProject from varats.utils.git_util import ( ShortCommitHash, @@ -84,16 +85,19 @@ class Xz(VProject): WORKLOADS = { WorkloadSet(WorkloadCategory.EXAMPLE): [ - Command( + VCommand( SourceRoot("xz") / RSBinary("xz"), "-k", - "geo-maps/countries-land-1km.geo.json", + # Use output_param to ensure input file + # gets appended after all arguments. + output_param=["{output}"], + output=SourceRoot("geo-maps/countries-land-250m.geo.json"), label="countries-land-1km", creates=["geo-maps/countries-land-1km.geo.json.xz"] ) ], WorkloadSet(WorkloadCategory.MEDIUM): [ - Command( + VCommand( SourceRoot("xz") / RSBinary("xz"), "-k", "-9e", @@ -101,9 +105,13 @@ class Xz(VProject): "--threads=1", "--format=xz", "-vv", - "geo-maps/countries-land-250m.geo.json", + # Use output_param to ensure input file + # gets appended after all arguments. + output_param=["{output}"], + output=SourceRoot("geo-maps/countries-land-250m.geo.json"), label="countries-land-250m", - creates=["geo-maps/countries-land-250m.geo.json.xz"] + creates=["geo-maps/countries-land-250m.geo.json.xz"], + requires_all_args={"--compress"}, ) ], } diff --git a/varats/varats/projects/perf_tests/feature_perf_cs_collection.py b/varats/varats/projects/perf_tests/feature_perf_cs_collection.py index cea24265b..6ff8db619 100644 --- a/varats/varats/projects/perf_tests/feature_perf_cs_collection.py +++ b/varats/varats/projects/perf_tests/feature_perf_cs_collection.py @@ -4,6 +4,7 @@ import benchbuild as bb from benchbuild.command import Command, SourceRoot, WorkloadSet +from benchbuild.source import HTTPMultiple from benchbuild.utils.cmd import make, cmake, mkdir from benchbuild.utils.revision_ranges import RevisionRange from benchbuild.utils.settings import get_number_of_jobs @@ -19,6 +20,7 @@ verify_binaries, ) from varats.project.sources import FeatureSource +from varats.project.varats_command import VCommand from varats.project.varats_project import VProject from varats.utils.git_commands import init_all_submodules, update_all_submodules from varats.utils.git_util import RevisionBinaryMap, ShortCommitHash @@ -394,3 +396,308 @@ def compile(self) -> None: def recompile(self) -> None: """Recompile the project.""" _do_feature_perf_cs_collection_recompile(self) + + +class SynthIPRuntime(VProject): + """Synthetic case-study project for testing flow sensitivity.""" + + NAME = 'SynthIPRuntime' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="SynthIPRuntime", + refspec="origin/HEAD", + limit=None, + shallow=False, + version_filter=project_filter_generator("SynthIPRuntime") + ), + HTTPMultiple( + local="geo-maps", + remote={ + "1.0": + "https://github.com/simonepri/geo-maps/releases/" + "download/v0.6.0" + }, + files=["countries-land-1km.geo.json", "countries-land-1m.geo.json"] + ), + FeatureSource() + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.SMALL): [ + VCommand( + SourceRoot("SynthIPRuntime") / RSBinary("Runtime"), + "-c", + "<", + "geo-maps/countries-land-1km.geo.json", + ">", + "geo-maps/countries-land-1km.geo.json.compressed", + label="countries-land-1km", + creates=["geo-maps/countries-land-1km.geo.json.compressed"], + requires_all_args={"-c"} + ) + ], + WorkloadSet(WorkloadCategory.MEDIUM): [ + VCommand( + SourceRoot("SynthIPRuntime") / RSBinary("Runtime"), + "-c", + "<", + "geo-maps/countries-land-1km.geo.json", + ">", + "geo-maps/countries-land-1km.geo.json.compressed", + 
label="countries-land-1m", + creates=["geo-maps/countries-land-1m.geo.json.compressed"], + requires_all_args={"-c"} + ) + ], + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + return RevisionBinaryMap( + get_local_project_git_path(SynthIPRuntime.NAME) + ).specify_binary( + "build/bin/Runtime", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("4151c42ffe", "master") + )[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHIPRUNTIME" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self) + + +class SynthIPTemplate(VProject): + """Synthetic case-study project for testing flow sensitivity.""" + + NAME = 'SynthIPTemplate' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="SynthIPTemplate", + refspec="origin/HEAD", + limit=None, + shallow=False, + version_filter=project_filter_generator("SynthIPTemplate") + ), + FeatureSource() + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.SMALL): [ + VCommand( + SourceRoot("SynthIPTemplate") / RSBinary("Template"), + "-c", + "<", + "geo-maps/countries-land-1km.geo.json", + ">", + "geo-maps/countries-land-1km.geo.json.compressed", + label="countries-land-1km", + creates=["geo-maps/countries-land-1km.geo.json.compressed"], + requires_all_patch={"Compress"} + ) + ], + WorkloadSet(WorkloadCategory.MEDIUM): [ + VCommand( + SourceRoot("SynthIPTemplate") / RSBinary("Template"), + "-c", + "<", + "geo-maps/countries-land-1km.geo.json", + ">", + "geo-maps/countries-land-1km.geo.json.compressed", + label="countries-land-1m", + creates=["geo-maps/countries-land-1m.geo.json.compressed"], + requires_all_patch={"Compress"} + ) + ], + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + return RevisionBinaryMap( + get_local_project_git_path(SynthIPTemplate.NAME) + ).specify_binary( + "build/bin/Template", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("4151c42ffe", "master") + )[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHIPTEMPLATE" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self) + + +class SynthIPTemplate2(VProject): + """Synthetic case-study project for testing flow sensitivity.""" + + NAME = 'SynthIPTemplate2' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="SynthIPTemplate2", + refspec="origin/HEAD", + limit=None, + shallow=False, + version_filter=project_filter_generator("SynthIPTemplate2") + ), + FeatureSource() + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.SMALL): [ + VCommand( + SourceRoot("SynthIPTemplate2") / RSBinary("Template2"), + "-c", + "<", + "geo-maps/countries-land-1km.geo.json", + ">", + "geo-maps/countries-land-1km.geo.json.compressed", + label="countries-land-1km", + creates=["geo-maps/countries-land-1km.geo.json.compressed"], + requires_all_patch={"Compress"} + ) + ], + WorkloadSet(WorkloadCategory.MEDIUM): [ + VCommand( + 
SourceRoot("SynthIPTemplate2") / RSBinary("Template2"), + "-c", + "<", + "geo-maps/countries-land-1km.geo.json", + ">", + "geo-maps/countries-land-1km.geo.json.compressed", + label="countries-land-1m", + creates=["geo-maps/countries-land-1m.geo.json.compressed"], + requires_all_patch={"Compress"} + ) + ], + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + return RevisionBinaryMap( + get_local_project_git_path(SynthIPTemplate2.NAME) + ).specify_binary( + "build/bin/Template2", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("4151c42ffe", "master") + )[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHIPTEMPLATE2" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self) + + +class SynthIPCombined(VProject): + """Synthetic case-study project for testing flow sensitivity.""" + + NAME = 'SynthIPCombined' + GROUP = 'perf_tests' + DOMAIN = ProjectDomains.TEST + + SOURCE = [ + bb.source.Git( + remote="https://github.com/se-sic/FeaturePerfCSCollection.git", + local="SynthIPCombined", + refspec="origin/HEAD", + limit=None, + shallow=False, + version_filter=project_filter_generator("SynthIPCombined") + ), + FeatureSource() + ] + + WORKLOADS = { + WorkloadSet(WorkloadCategory.SMALL): [ + VCommand( + SourceRoot("SynthIPCombined") / RSBinary("Combined"), + "-c", + "<", + "geo-maps/countries-land-1km.geo.json", + ">", + "geo-maps/countries-land-1km.geo.json.compressed", + label="countries-land-1km", + creates=["geo-maps/countries-land-1km.geo.json.compressed"], + requires_all_args={"-c"} + ) + ], + WorkloadSet(WorkloadCategory.MEDIUM): [ + VCommand( + SourceRoot("SynthIPCombined") / RSBinary("Combined"), + "-c", + "<", + "geo-maps/countries-land-1km.geo.json", + ">", + "geo-maps/countries-land-1km.geo.json.compressed", + label="countries-land-1m", + creates=["geo-maps/countries-land-1m.geo.json.compressed"], + requires_all_args={"-c"} + ) + ], + } + + @staticmethod + def binaries_for_revision( + revision: ShortCommitHash # pylint: disable=W0613 + ) -> tp.List[ProjectBinaryWrapper]: + return RevisionBinaryMap( + get_local_project_git_path(SynthIPCombined.NAME) + ).specify_binary( + "build/bin/Combined", + BinaryType.EXECUTABLE, + only_valid_in=RevisionRange("4151c42ffe", "master") + )[revision] + + def run_tests(self) -> None: + pass + + def compile(self) -> None: + """Compile the project.""" + _do_feature_perf_cs_collection_compile( + self, "FPCSC_ENABLE_PROJECT_SYNTHIPCOMBINED" + ) + + def recompile(self) -> None: + """Recompile the project.""" + _do_feature_perf_cs_collection_recompile(self)