From ac924269e0f70fc58a5d743537ce9fc8a442d1e1 Mon Sep 17 00:00:00 2001 From: John Andersen Date: Mon, 25 Mar 2024 19:04:33 +0000 Subject: [PATCH] operations: innersource: Remove unpublished plugin Related: https://github.com/intel/dffml/pull/1481 Signed-off-by: John Andersen --- operations/innersource/.coveragerc | 13 - operations/innersource/.gitignore | 22 -- operations/innersource/LICENSE | 21 -- operations/innersource/MANIFEST.in | 3 - operations/innersource/README.rst | 1 - .../dffml_operations_innersource/__init__.py | 0 .../actions_validator.py | 86 ----- .../dffml_operations_innersource/cli.py | 232 ------------- .../npm_groovy_lint.py | 183 ---------- .../operations.py | 317 ------------------ .../dffml_operations_innersource/version.py | 1 - operations/innersource/entry_points.txt | 2 - operations/innersource/pyproject.toml | 22 -- operations/innersource/setup.cfg | 40 --- operations/innersource/setup.py | 8 - operations/innersource/tests/__init__.py | 0 .../innersource/tests/test_operations.py | 101 ------ 17 files changed, 1052 deletions(-) delete mode 100644 operations/innersource/.coveragerc delete mode 100644 operations/innersource/.gitignore delete mode 100644 operations/innersource/LICENSE delete mode 100644 operations/innersource/MANIFEST.in delete mode 100644 operations/innersource/README.rst delete mode 100644 operations/innersource/dffml_operations_innersource/__init__.py delete mode 100644 operations/innersource/dffml_operations_innersource/actions_validator.py delete mode 100644 operations/innersource/dffml_operations_innersource/cli.py delete mode 100644 operations/innersource/dffml_operations_innersource/npm_groovy_lint.py delete mode 100644 operations/innersource/dffml_operations_innersource/operations.py delete mode 100644 operations/innersource/dffml_operations_innersource/version.py delete mode 100644 operations/innersource/entry_points.txt delete mode 100644 operations/innersource/pyproject.toml delete mode 100644 operations/innersource/setup.cfg delete mode 100644 operations/innersource/setup.py delete mode 100644 operations/innersource/tests/__init__.py delete mode 100644 operations/innersource/tests/test_operations.py diff --git a/operations/innersource/.coveragerc b/operations/innersource/.coveragerc deleted file mode 100644 index 6c2f46b02b..0000000000 --- a/operations/innersource/.coveragerc +++ /dev/null @@ -1,13 +0,0 @@ -[run] -source = - dffml_operations_innersource - tests -branch = True - -[report] -exclude_lines = - no cov - no qa - noqa - pragma: no cover - if __name__ == .__main__.: diff --git a/operations/innersource/.gitignore b/operations/innersource/.gitignore deleted file mode 100644 index 0edd3d2ad4..0000000000 --- a/operations/innersource/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -*.log -*.pyc -.cache/ -.coverage -.idea/ -.vscode/ -*.egg-info/ -build/ -dist/ -docs/build/ -venv/ -wheelhouse/ -*.egss -.mypy_cache/ -*.swp -.venv/ -.eggs/ -*.modeldir -*.db -htmlcov/ -built_html_docs/ -.tools/ diff --git a/operations/innersource/LICENSE b/operations/innersource/LICENSE deleted file mode 100644 index 61731767c7..0000000000 --- a/operations/innersource/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) 2021 johnsa1 - -MIT License - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies 
of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/operations/innersource/MANIFEST.in b/operations/innersource/MANIFEST.in deleted file mode 100644 index 4a485c3c31..0000000000 --- a/operations/innersource/MANIFEST.in +++ /dev/null @@ -1,3 +0,0 @@ -include README.rst -include LICENSE -recursive-include dffml_operations_innersource * diff --git a/operations/innersource/README.rst b/operations/innersource/README.rst deleted file mode 100644 index f6eeba643b..0000000000 --- a/operations/innersource/README.rst +++ /dev/null @@ -1 +0,0 @@ -../common/README.rst \ No newline at end of file diff --git a/operations/innersource/dffml_operations_innersource/__init__.py b/operations/innersource/dffml_operations_innersource/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/operations/innersource/dffml_operations_innersource/actions_validator.py b/operations/innersource/dffml_operations_innersource/actions_validator.py deleted file mode 100644 index 8cca11790d..0000000000 --- a/operations/innersource/dffml_operations_innersource/actions_validator.py +++ /dev/null @@ -1,86 +0,0 @@ -import logging -from typing import NewType - -import dffml - -# from .operations import ( -from dffml_operations_innersource.operations import ( - RepoDirectory, - ActionYAMLFileWorkflowUnixStylePath, -) - - -ActionsValidatorBinary = NewType("ActionsValidatorBinary", str) -ActionsValidatorResult = NewType("ActionsValidatorResult", dict) - - -@dffml.op -async def actions_validator( - repo_directory: RepoDirectory, - yaml_file_path: ActionYAMLFileWorkflowUnixStylePath, - actions_validator_binary: ActionsValidatorBinary, - *, - logger: logging.Logger = None, -) -> ActionsValidatorResult: - """ - >>> import asyncio - >>> import pathlib - >>> import tempfile - >>> - >>> import dffml - >>> from dffml_operations_innersource.actions_validator import ( - ... actions_validator, - ... ) - >>> from alice_test.shouldi.contribute.actions_validator import ( - ... ensure_actions_validator, - ... ) - >>> - >>> async def main(): - ... with tempfile.TemporaryDirectory() as tempdir: - ... yaml_file_path = pathlib.Path(tempdir).joinpath("action.yml") - ... await dffml.cached_download( - ... "https://raw.githubusercontent.com/mpalmer/action-validator/dd49fc0db4fc423b32704cc70ad80564d285ded7/test/002_basic_action/action.yml", - ... yaml_file_path, - ... "fddbaceb0c2d1779438f149db76896764c45a1adea3221b92e481c7a6a72c5ece33ccbb4ef42afc8d03d23b83d02ada9", - ... ) - ... actions_validator_binary = await ensure_actions_validator() - ... return await actions_validator( - ... tempdir, - ... yaml_file_path, - ... actions_validator_binary, - ... 
)
-    >>>
-    >>> print(asyncio.run(main()))
-    {'pass': True, 'exit_code': 0, 'items': None}
-    """
-    exit_code = -1
-    stderr = ""
-    items = None
-    async for event, result in dffml.run_command_events(
-        [
-            str(actions_validator_binary),
-            str(yaml_file_path),
-        ],
-        cwd=repo_directory,
-        logger=logger,
-        events=[
-            dffml.Subprocess.STDOUT,
-            dffml.Subprocess.STDERR,
-            dffml.Subprocess.COMPLETED,
-        ],
-        raise_on_failure=False,
-    ):
-        if event is dffml.Subprocess.STDOUT and logger:
-            logger.debug("Passed validation: %s", result.decode())
-        elif event is dffml.Subprocess.STDERR and logger:
-            stderr = result.decode()
-            logger.debug("Failed validation: %s", stderr)
-            # TODO Parse output into dict or data model
-            items = stderr
-        elif event is dffml.Subprocess.COMPLETED:
-            exit_code = result
-    return {
-        "pass": bool(exit_code == 0),
-        "exit_code": exit_code,
-        "items": items,
-    }
diff --git a/operations/innersource/dffml_operations_innersource/cli.py b/operations/innersource/dffml_operations_innersource/cli.py
deleted file mode 100644
index 7e215f6a8a..0000000000
--- a/operations/innersource/dffml_operations_innersource/cli.py
+++ /dev/null
@@ -1,232 +0,0 @@
-import sys
-import json
-import pathlib
-import tempfile
-import platform
-import itertools
-from typing import Dict, NewType
-
-import dffml
-
-import dffml_feature_git.feature.definitions
-import dffml_feature_git.feature.operations
-
-from . import operations
-
-
-@dffml.config
-class EnsureTokeiConfig:
-    cache_dir: pathlib.Path = dffml.field("Cache directory to store downloads in",)
-    platform_urls: Dict[str, Dict[str, str]] = dffml.field(
-        "Mapping of platform.system() return values to tokei download URLs with hashes",
-        default_factory=lambda: {
-            "Linux": {
-                "url": "https://github.com/XAMPPRocky/tokei/releases/download/v10.1.1/tokei-v10.1.1-x86_64-unknown-linux-gnu.tar.gz",
-                "expected_hash": "b54fa0959e7a3a8935bd5cd86795b92e14d0a7b2cb6fb8f362b7b48198ce83e6dedc35a87e7c8fa405328f19d0ea6c47",
-            },
-            "Darwin": {
-                "url": "https://github.com/XAMPPRocky/tokei/releases/download/v10.1.1/tokei-v10.1.1-x86_64-apple-darwin.tar.gz",
-                "expected_hash": "8c8a1d8d8dd4d8bef93dabf5d2f6e27023777f8553393e269765d7ece85e68837cba4374a2615d83f071dfae22ba40e2",
-            },
-        },
-    )
-
-
-import contextlib
-
-
-@dffml.op(
-    config_cls=EnsureTokeiConfig, imp_enter={"stack": contextlib.AsyncExitStack,},
-)
-async def ensure_tokei(self) -> str:
-    tokei = await dffml.cached_download_unpack_archive(
-        **{
-            "file_path": self.parent.config.cache_dir.joinpath("tokei.tar.gz"),
-            "directory_path": self.parent.config.cache_dir.joinpath("tokei-download"),
-            # Use whatever values are appropriate for the system we are on
-            **self.parent.config.platform_urls[platform.system()],
-        }
-    )
-    self.parent.stack.enter_context(dffml.prepend_to_path(tokei))
-    return tokei.joinpath("tokei")
-
-
-GitHubRepoID = NewType("GitHubRepoID", str)
-
-
-@dffml.op
-async def github_repo_id_to_clone_url(
-    self, repo_id: GitHubRepoID,
-) -> dffml_feature_git.feature.definitions.URLType:
-    """
-    Convert GitHub Integer Repository ID to Clonable URL.
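-
-    Internally this shells out to ``gh api
-    https://api.github.com/repositories/{repo_id}`` (assumes the ``gh``
-    CLI is installed and authenticated) and returns the ``clone_url``
-    field of the JSON response.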
-    """
-    with tempfile.TemporaryDirectory() as tempdir:
-        # Write out the API query response to a file
-        api_response_contents_path = pathlib.Path(tempdir, "contents")
-        with open(api_response_contents_path, "wb") as stdout:
-            await dffml.run_command(
-                ["gh", "api", f"https://api.github.com/repositories/{repo_id}"],
-                stdout=stdout,
-            )
-            stdout.seek(0)
-        # Parse the response body as JSON
-        repository = json.loads(api_response_contents_path.read_text())
-        return repository["clone_url"]
-
-
-LocalRepoDirectory = NewType("LocalRepoDirectory", str)
-
-
-@dffml.op(
-    inputs={
-        "directory": LocalRepoDirectory,
-    },
-    outputs={"repo": dffml_feature_git.feature.definitions.git_repository},
-)
-async def local_repo_resolver(
-    self, directory: LocalRepoDirectory,
-) -> dffml_feature_git.feature.definitions.git_repository.spec:
-    async for event, result in dffml.run_command_events(
-        ["git", "remote", "get-url", "origin"],
-        cwd=directory,
-        logger=self.logger,
-        raise_on_failure=False,
-        events=[dffml.Subprocess.STDOUT, dffml.Subprocess.COMPLETED],
-    ):
-        if event is dffml.Subprocess.STDOUT:
-            url = result.decode().strip()
-            if url.endswith(".git"):
-                url = url[:-4]
-        elif event is dffml.Subprocess.COMPLETED and result != 0:
-            raise RuntimeError("Failed to get local directory remote URL")
-    return {"repo": {"URL": url, "directory": directory}}
-
-
-COLLECTOR_DATAFLOW = dffml.DataFlow(
-    dffml.GroupBy,
-    *dffml.opimp_in(dffml_feature_git.feature.operations),
-    *dffml.opimp_in(operations),
-    *dffml.opimp_in(sys.modules[__name__]),
-    # TODO(alice) Update to use the real overlay infra within run()
-    *itertools.chain(
-        *[
-            dffml.object_to_operations(cls)
-            for cls in dffml.Overlay.load(
-                entrypoint="dffml.overlays.alice.shouldi.contribute",
-            )
-        ],
-    ),
-    configs={
-        ensure_tokei.op.name: EnsureTokeiConfig(
-            cache_dir=pathlib.Path(
-                ".tools", "open-architecture", "innersource", ".cache", "tokei",
-            )
-        ),
-    },
-)
-COLLECTOR_DATAFLOW.seed = [
-    dffml.Input(value=1, definition=COLLECTOR_DATAFLOW.definitions["quarters"]),
-    dffml.Input(
-        value=True, definition=COLLECTOR_DATAFLOW.definitions["no_git_branch_given"],
-    ),
-    dffml.Input(
-        value=dict(
-            itertools.chain(
-                *[
-                    [
-                        (output.name, {
-                            "group": output.name,
-                            "by": "quarter",
-                            "nostrict": True,
-                        })
-                        for output in operation.outputs.values()
-                    ]
-                    for operation in COLLECTOR_DATAFLOW.operations.values()
-                ]
-            )
-        ),
-        definition=COLLECTOR_DATAFLOW.definitions["group_by_spec"],
-    ),
-]
-COLLECTOR_DATAFLOW.operations[
-    COLLECTOR_DATAFLOW.operations["lines_of_code_by_language"].name
-] = COLLECTOR_DATAFLOW.operations[
-    COLLECTOR_DATAFLOW.operations["lines_of_code_by_language"].name
-]._replace(
-    conditions=[ensure_tokei.op.outputs["result"]]
-)
-COLLECTOR_DATAFLOW.update(auto_flow=True)
-# Operations which should take inputs from other operations in flow and seed
-# MUST have their input flow modified to add the seed origin to the allowlist.
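-# Hypothetical sketch of the effect: after the loop below runs, a flow
-# entry such as COLLECTOR_DATAFLOW.flow["clone_git_repo"].inputs["URL"]
-# also lists "seed" as an allowed origin, so URLs given on the command
-# line (seed inputs) reach clone_git_repo alongside any produced within
-# the flow.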
-for operation_name, (input_name, origins) in [
-    (dffml_feature_git.feature.operations.clone_git_repo.op.name, ("URL", ["seed",])),
-    (dffml_feature_git.feature.operations.check_if_valid_git_repository_URL.op.name, ("URL", ["seed",])),
-]:
-    COLLECTOR_DATAFLOW.flow[operation_name].inputs[input_name].extend(origins)
-COLLECTOR_DATAFLOW.update_by_origin()
-
-
-import copy
-import dataclasses
-import dffml.cli.dataflow
-
-
-DEFAULT_SOURCE = dffml.JSONSource(
-    filename=pathlib.Path(".tools", "open-architecture", "innersource", "repos.json",),
-    readwrite=True,
-    allowempty=True,
-    mkdirs=True,
-)
-
-
-# NOTE When CLI and operations are merged: All this is the same stuff that will
-# happen to Operation config_cls structures. We need a more ergonomic API to
-# obscure the complexity dataclasses introduce when modifying fields/defaults
-# within subclasses.
-for dffml_cli_class_name, field_modifications in {
-    "RunAllRecords": {
-        # metadata setting could be less awkward
-        "dataflow": {"default": COLLECTOR_DATAFLOW},
-        "record_def": {"default": COLLECTOR_DATAFLOW.definitions["URL"].name},
-        "sources": {"default_factory": lambda: dffml.Sources(DEFAULT_SOURCE)},
-    },
-    "RunRecordSet": {
-        "dataflow": {"default": COLLECTOR_DATAFLOW},
-        "record_def": {"default": COLLECTOR_DATAFLOW.definitions["URL"].name},
-        "sources": {"default_factory": lambda: dffml.Sources(DEFAULT_SOURCE)},
-    },
-    "Diagram": {"dataflow": {"default": COLLECTOR_DATAFLOW,},},
-}.items():
-    # Create the class and config names by prepending InnerSource
-    new_class_name = "InnerSource" + dffml_cli_class_name
-    # Create a derived class
-    new_class = getattr(dffml.cli.dataflow, dffml_cli_class_name).subclass(
-        new_class_name, field_modifications,
-    )
-    # Add our new class to the global namespace
-    setattr(
-        sys.modules[__name__], new_class.CONFIG.__qualname__, new_class.CONFIG,
-    )
-    setattr(
-        sys.modules[__name__], new_class.__qualname__, new_class,
-    )
-
-
-class InnerSourceRunRecords(dffml.CMD):
-    """Run DataFlow and assign output to a record"""
-
-    _set = InnerSourceRunRecordSet
-    _all = InnerSourceRunAllRecords
-
-
-class InnerSourceRun(dffml.CMD):
-    """Run dataflow"""
-
-    records = InnerSourceRunRecords
-
-
-class InnerSourceCLI(dffml.CMD):
-
-    run = InnerSourceRun
-    diagram = InnerSourceDiagram
diff --git a/operations/innersource/dffml_operations_innersource/npm_groovy_lint.py b/operations/innersource/dffml_operations_innersource/npm_groovy_lint.py
deleted file mode 100644
index eb6a49f228..0000000000
--- a/operations/innersource/dffml_operations_innersource/npm_groovy_lint.py
+++ /dev/null
@@ -1,183 +0,0 @@
-import json
-import signal
-import asyncio
-import pathlib
-import logging
-import contextlib
-from typing import NewType
-
-import dffml
-
-# from .operations import (
-from dffml_operations_innersource.operations import (
-    RepoDirectory,
-    GroovyFileWorkflowUnixStylePaths,
-)
-
-
-NPMGroovyLintCMD = NewType("NPMGroovyLintCMD", list[str])
-NPMGroovyLintResult = NewType("NPMGroovyLintResult", str)
-JavaBinary = NewType("JavaBinary", str)
-CodeNarcServerProc = NewType("CodeNarcServerProc", object)
-CodeNarcServerReturnCode = NewType("CodeNarcServerReturnCode", int)
-
-
-class CouldNotResolvePathToNPMGroovyLintInstallError(Exception):
-    pass
-
-
-class CodeNarcServerUnknownFailure(Exception):
-    pass
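-
-
-# A minimal usage sketch of the context manager below (hypothetical
-# paths; assumes a Java runtime and an npm-groovy-lint install whose
-# layout matches the released package):
-#
-#     async with code_narc_server(
-#         "/usr/bin/java",
-#         ["/path/to/node_modules/.bin/npm-groovy-lint"],
-#     ) as proc:
-#         ...  # proc is the running CodeNarcServer subprocess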
-@contextlib.asynccontextmanager
-async def code_narc_server(
-    java_binary: JavaBinary,
-    npm_groovy_lint_cmd: NPMGroovyLintCMD,
-    *,
-    env: dict = None,
-    logger: logging.Logger = None,
-) -> CodeNarcServerProc:
-    # Path to compiled CodeNarcServer within released package
-    npm_groovy_lint_path = npm_groovy_lint_cmd[-1]
-    if isinstance(npm_groovy_lint_path, str):
-        npm_groovy_lint_path = pathlib.Path(npm_groovy_lint_path)
-        if not npm_groovy_lint_path.exists():
-            npm_groovy_lint_path = dffml.which(npm_groovy_lint_path.name)
-    if not isinstance(npm_groovy_lint_path, pathlib.Path):
-        raise CouldNotResolvePathToNPMGroovyLintInstallError(npm_groovy_lint_cmd)
-    java_lib_path = npm_groovy_lint_path.resolve().parents[1].joinpath(
-        "lib", "java",
-    )
-    # Run the server
-    proc = None
-    # TODO Port is currently hardcoded, recompile? src/ files in npm-groovy-lint
-    async for event, result in dffml.run_command_events(
-        [
-            java_binary,
-            "-Djava.net.useSystemProxies=true",
-            "-Xms256m",
-            "-Xmx2048m",
-            "-cp",
-            (
-                str(java_lib_path.joinpath("CodeNarcServer.jar").resolve())
-                + ":"
-                + str(java_lib_path.joinpath("*").resolve())
-            ),
-            "com.nvuillam.CodeNarcServer",
-            "--server",
-            r"includes='{}/.groovy'",
-        ],
-        env=env,
-        logger=logger,
-        events=[
-            dffml.Subprocess.CREATED,
-            dffml.Subprocess.COMPLETED,
-        ],
-        raise_on_failure=False,
-    ):
-        if event is dffml.Subprocess.CREATED:
-            proc = result
-            # TODO Ask for STDOUT_READLINE and wait to yield until we know we
-            # can hit the HTTP server?
-            try:
-                yield proc
-            finally:
-                # Send Ctrl-C to exit cleanly
-                with contextlib.suppress(ProcessLookupError):
-                    proc.send_signal(signal.SIGINT)
-        elif event is dffml.Subprocess.COMPLETED:
-            # Clean exit triggered by Ctrl-C will have a return code as follows
-            if result not in (130, -2):
-                raise CodeNarcServerUnknownFailure(f"Exit code: {result}")
-
-
-@dffml.op
-async def start_code_narc_server(
-    java_binary: JavaBinary,
-    npm_groovy_lint_cmd: NPMGroovyLintCMD,
-    *,
-    env: dict = None,
-    logger: logging.Logger = None,
-) -> CodeNarcServerProc:
-    proc_context_manager = code_narc_server(
-        java_binary,
-        npm_groovy_lint_cmd,
-        env=env,
-        logger=logger,
-    )
-    proc_context_manager.proc = await proc_context_manager.__aenter__()
-    return proc_context_manager
-
-
-@dffml.op(
-    stage=dffml.Stage.CLEANUP,
-)
-async def stop_code_narc_server(
-    proc: CodeNarcServerProc,
-    *,
-    env: dict = None,
-    logger: logging.Logger = None,
-) -> CodeNarcServerReturnCode:
-    await proc.__aexit__(None, None, None)
-    return proc.proc.returncode
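-
-
-# start_code_narc_server and the CLEANUP-stage stop_code_narc_server
-# split the code_narc_server context manager across a flow's lifetime:
-# start enters it while the flow spins up, and stop exits it once the
-# dependent operations have finished.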
-@dffml.op
-async def npm_groovy_lint(
-    repo_directory: RepoDirectory,
-    java_binary: JavaBinary,
-    # TODO Port for code narc is currently hardcoded, upstream fix and use here.
-    _code_narc_proc: CodeNarcServerProc,
-    npm_groovy_lint_cmd: NPMGroovyLintCMD,
-    groovy_paths: GroovyFileWorkflowUnixStylePaths,
-    *,
-    env: dict = None,
-    logger: logging.Logger = None,
-) -> NPMGroovyLintResult:
-    if not groovy_paths:
-        return
-    # Check for config file
-    config_args = []
-    npmgroovylintrc_paths = list(pathlib.Path(repo_directory).rglob(".groovylintrc.json"))
-    if npmgroovylintrc_paths:
-        if logger and len(npmgroovylintrc_paths) > 1:
-            logger.warning("Choosing first config file of multiple found: %r", npmgroovylintrc_paths)
-        config_args = ["--config", npmgroovylintrc_paths[0]]
-    cmd = [
-        *npm_groovy_lint_cmd,
-        *config_args,
-        "--noserver",
-        # It will try to install java unless we give it one
-        "--javaexecutable",
-        java_binary,
-        "--output",
-        "json",
-        "--",
-        *groovy_paths,
-    ]
-    if logger:
-        logger.debug("cmd: %r", cmd)
-    proc = await asyncio.create_subprocess_exec(
-        *cmd,
-        cwd=repo_directory,
-        env=env,
-        stdout=asyncio.subprocess.PIPE,
-    )
-    work = {
-        asyncio.create_task(proc.wait()): "wait",
-        asyncio.create_task(proc.communicate()): "communicate",
-    }
-    async for event, result in dffml.concurrently(work):
-        if event == "communicate":
-            parsed_result = json.loads(result[0])
-            return {
-                **parsed_result,
-                **{
-                    "files": {
-                        str(pathlib.Path(path).relative_to(repo_directory)): value
-                        for path, value in parsed_result.get("files", {}).items()
-                    }
-                }
-            }
diff --git a/operations/innersource/dffml_operations_innersource/operations.py b/operations/innersource/dffml_operations_innersource/operations.py
deleted file mode 100644
index 586cc1a4e3..0000000000
--- a/operations/innersource/dffml_operations_innersource/operations.py
+++ /dev/null
@@ -1,317 +0,0 @@
-import pathlib
-import logging
-import datetime
-import itertools
-from typing import List, NewType
-
-import dffml
-from dffml_feature_git.feature.definitions import (
-    git_repository_checked_out,
-    quarter_start_date,
-)
-
-
-GitHubActionsWorkflowUnixStylePath = NewType("GitHubActionsWorkflowUnixStylePath", str)
-JenkinsfileWorkflowUnixStylePath = NewType("JenkinsfileWorkflowUnixStylePath", str)
-GroovyFileWorkflowUnixStylePath = NewType("GroovyFileWorkflowUnixStylePath", str)
-GroovyFileWorkflowUnixStylePaths = NewType("GroovyFileWorkflowUnixStylePaths", list[GroovyFileWorkflowUnixStylePath])
-ActionYAMLFileWorkflowUnixStylePath = NewType("ActionYAMLFileWorkflowUnixStylePath", str)
-ActionYAMLFileWorkflowUnixStylePaths = NewType("ActionYAMLFileWorkflowUnixStylePaths", list[ActionYAMLFileWorkflowUnixStylePath])
-IsGitHubAction = NewType("IsGitHubAction", bool)
-IsJenkinsLibrary = NewType("IsJenkinsLibrary", bool)
-
-
-def relative_paths(
-    directory: str,
-    paths: List[str],
-):
-    return [
-        path.relative_to(directory)
-        for path in paths
-    ]
-
-
-@dffml.op(
-    inputs={"repo": git_repository_checked_out,},
-    outputs={"result": GitHubActionsWorkflowUnixStylePath},
-    expand=["result"],
-)
-def github_workflows(self, repo: git_repository_checked_out.spec) -> dict:
-    return {
-        "result": map(
-            str,
-            relative_paths(
-                repo.directory,
-                pathlib.Path(repo.directory, ".github", "workflows").glob("*.yml"),
-            ),
-        ),
-    }
-
-
-@dffml.op(
-    inputs={"repo": git_repository_checked_out,},
-    outputs={"result": JenkinsfileWorkflowUnixStylePath},
-    expand=["result"],
-)
-def jenkinsfiles(self, repo: git_repository_checked_out.spec) -> dict:
-    return {
-        "result": map(
-            str,
-            relative_paths(
-                repo.directory,
-                pathlib.Path(repo.directory).rglob("**/*Jenkinsfile")
-            ),
-        ),
-    }
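-
-
-# For illustration (hypothetical repository layout): because these ops use
-# expand=["result"], a repo containing .github/workflows/tests.yml and
-# release.yml makes github_workflows above emit two results,
-# ".github/workflows/tests.yml" and ".github/workflows/release.yml",
-# one Input per matched file.
-
-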
-@dffml.op(
-    inputs={"repo": git_repository_checked_out,},
-    outputs={
-        "is_jenkins_library": IsJenkinsLibrary,
-        "groovy_files": GroovyFileWorkflowUnixStylePaths,
-        "groovy_file": GroovyFileWorkflowUnixStylePath,
-    },
-    expand=["groovy_file"],
-)
-def groovy_files(self, repo: git_repository_checked_out.spec) -> dict:
-    list_of_groovy_files = list(
-        map(
-            str,
-            relative_paths(
-                repo.directory,
-                [
-                    *pathlib.Path(repo.directory).rglob("vars/*.groovy"),
-                    *pathlib.Path(repo.directory).rglob("src/**/*.groovy"),
-                ],
-            ),
-        ),
-    )
-    return {
-        "is_jenkins_library": bool(list_of_groovy_files),
-        "groovy_files": list_of_groovy_files,
-        "groovy_file": list_of_groovy_files,
-    }
-
-
-@dffml.op(
-    inputs={"repo": git_repository_checked_out,},
-    outputs={
-        "actions": ActionYAMLFileWorkflowUnixStylePaths,
-        "action": ActionYAMLFileWorkflowUnixStylePath,
-        "is_github_action": IsGitHubAction,
-    },
-    expand=["action"],
-)
-def action_yml_files(self, repo: git_repository_checked_out.spec) -> dict:
-    list_of_action_yml_files = list(
-        pathlib.Path(repo.directory).rglob("**/action.yml")
-    )
-    # Remove YAML files that are not GitHub Actions (for example if someone
-    # named a workflow action.yml).
-    remove_paths = set()
-    for action_path in list_of_action_yml_files:
-        action_text = action_path.read_text(errors="backslashreplace")
-        action_text = action_text.replace("\r", "")
-        # Look for runs: at top level
-        if "runs:" not in action_text.split("\n"):
-            remove_paths.add(action_path)
-    for remove_path in remove_paths:
-        list_of_action_yml_files.remove(remove_path)
-    # Convert to repo relative paths
-    list_of_action_yml_files = list(
-        map(
-            str,
-            relative_paths(
-                repo.directory,
-                list_of_action_yml_files,
-            ),
-        ),
-    )
-    return {
-        "is_github_action": bool(list_of_action_yml_files),
-        "actions": list_of_action_yml_files,
-        "action": list_of_action_yml_files,
-    }
-
-
-FileReadmePresent = NewType("FileReadmePresent", bool)
-FileContributingPresent = NewType("FileContributingPresent", bool)
-FileCodeOfConductPresent = NewType("FileCodeOfConductPresent", bool)
-FileSecurityPresent = NewType("FileSecurityPresent", bool)
-FileSupportPresent = NewType("FileSupportPresent", bool)
-
-
-@dffml.op(inputs={"repo": git_repository_checked_out,},)
-def readme_present(self, repo: git_repository_checked_out.spec) -> FileReadmePresent:
-    return any(
-        [
-            path
-            for path in pathlib.Path(repo.directory).iterdir()
-            if "readme" == path.stem.lower()
-        ]
-    )
-
-
-@dffml.op(inputs={"repo": git_repository_checked_out,},)
-def contributing_present(self, repo: git_repository_checked_out.spec) -> FileContributingPresent:
-    return any(
-        [
-            pathlib.Path(repo.directory, "CONTRIBUTING.md").is_file(),
-            pathlib.Path(repo.directory, "CONTRIBUTING.rst").is_file()
-        ]
-    )
-
-
-# TODO Check compliance with RFC 9116
-@dffml.op(inputs={"repo": git_repository_checked_out,},)
-def security_present(self, repo: git_repository_checked_out.spec) -> FileSecurityPresent:
-    return any(
-        [
-            pathlib.Path(repo.directory, "SECURITY.md").is_file(),
-            pathlib.Path(repo.directory, "SECURITY.rst").is_file(),
-            pathlib.Path(repo.directory, "SECURITY.txt").is_file(),
-            pathlib.Path(repo.directory, "security.txt").is_file(),
-        ]
-    )
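-
-
-# Per RFC 9116 the canonical location for this file is
-# .well-known/security.txt; a follow-up to the TODO above could extend
-# the list with e.g. (hypothetical addition):
-#
-#     pathlib.Path(repo.directory, ".well-known", "security.txt").is_file(),
-
-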
-@dffml.op(inputs={"repo": git_repository_checked_out,},)
-def support_present(self, repo: git_repository_checked_out.spec) -> FileSupportPresent:
-    return any(
-        [
-            pathlib.Path(repo.directory, "SUPPORT.md").is_file(),
-            pathlib.Path(repo.directory, "SUPPORT.rst").is_file(),
-        ]
-    )
-
-
-@dffml.op(inputs={"repo": git_repository_checked_out,},)
-def code_of_conduct_present(self, repo: git_repository_checked_out.spec) -> FileCodeOfConductPresent:
-    return any(
-        [
-            pathlib.Path(repo.directory, "CODE_OF_CONDUCT.md").is_file(),
-            pathlib.Path(repo.directory, "CODE_OF_CONDUCT.rst").is_file(),
-        ]
-    )
-
-
-# TODO Auto definition code which is about to undergo refactor will fix up this
-# oddness with typing and half ability to have auto inputs with types.
-@dffml.op(inputs={}, outputs={"result": quarter_start_date})
-def get_current_datetime_as_git_date():
-    return {
-        "result": datetime.datetime.now().strftime("%Y-%m-%d %H:%M"),
-    }
-
-
-@dffml.config
-class MaintainedConfig:
-    commits: int = dffml.field(
-        "A number of commits in the last quarter equal to or greater than this value results in a return value of True",
-        default=1,
-    )
-
-
-@dffml.op(
-    inputs={
-        "results": dffml.GroupBy.op.outputs["output"],
-    },
-    config_cls=MaintainedConfig,
-    stage=dffml.Stage.OUTPUT,
-)
-def maintained(self, results: dict) -> bool:
-    # As an example, if there is one commit in the last period (quarter), return
-    # maintained (True for the maintained operation for this input data).
-    if results["commits"][-1] >= self.config_cls.commits:
-        return True
-
-
-@dffml.config
-class UnmaintainedConfig:
-    commits: int = dffml.field(
-        "A number of commits in the last quarter less than this value results in a return value of True",
-        default=1,
-    )
-
-
-@dffml.op(
-    inputs={
-        "results": dffml.GroupBy.op.outputs["output"],
-    },
-    stage=dffml.Stage.OUTPUT,
-    config_cls=UnmaintainedConfig,
-)
-def unmaintained(self, results: dict) -> bool:
-    # As an example, if there are no commits in the last quarter, return
-    # unmaintained (True for the unmaintained operation for this input data).
-    if results["commits"][-1] < self.config_cls.commits:
-        return True
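-
-
-# Worked example with made-up numbers: if results["commits"] == [3, 2, 0],
-# the defaults above give maintained() -> None (0 >= 1 is False) and
-# unmaintained() -> True (0 < 1); only the most recent quarter, index -1,
-# is consulted.
-
-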
-# TODO We may not need stage anymore, need to see if we should deprecate
-@dffml.op(
-    stage=dffml.Stage.OUTPUT, conditions=[maintained.op.outputs["result"]],
-)
-def badge_maintained() -> str:
-    return "https://img.shields.io/badge/Maintainance-Active-green"
-
-
-@dffml.op(
-    stage=dffml.Stage.OUTPUT, conditions=[unmaintained.op.outputs["result"]],
-)
-def badge_unmaintained() -> str:
-    return "https://img.shields.io/badge/Maintainance-Inactive-red"
-
-
-RepoDirectory = NewType("RepoDirectory", str)
-
-
-@dffml.op(
-    inputs={"repo": git_repository_checked_out,},
-    outputs={"result": RepoDirectory},
-)
-def repo_directory(self, repo: git_repository_checked_out.spec) -> RepoDirectory:
-    # How did this not exist? I think it does somewhere else, another branch
-    return {"result": repo.directory}
-
-
-RepoURL = NewType("RepoURL", str)
-
-
-@dffml.op(
-    inputs={"repo": git_repository_checked_out,},
-    outputs={"result": RepoURL},
-)
-def repo_url(self, repo: git_repository_checked_out.spec) -> RepoURL:
-    """
-    Helper operation to expose repo URL of checked out repo object.
-
-    TODO Remove this in favor of some kind of mapping extract style on objects
-    ref engineering logs for more notes on @op.mapping.extract style decorator.
-    """
-    return {"result": repo.URL}
-
-
-HasDocs = NewType("HasDocs", dict)
-
-
-@dffml.op
-def has_docs(
-    repo_directory: RepoDirectory,
-    readme_present: FileReadmePresent,
-    *,
-    logger: logging.Logger = None,
-) -> HasDocs:
-    # TODO Refactor this, ideally support regex and or open policy agent
-    check_files_or_strings = ("support", "usage", "example", "known issues")
-    output = dict(zip(["readme_present", *check_files_or_strings], [False] * 5))
-    for path in pathlib.Path(repo_directory).iterdir():
-        if "readme" == path.stem.lower():
-            output["readme_present"] = True
-            for check in check_files_or_strings:
-                if check in path.read_text(errors='backslashreplace').lower():
-                    output[check] = True
-        for check in check_files_or_strings:
-            if check.replace(" ", "_") == path.stem.lower():
-                output[check] = True
-    return output
diff --git a/operations/innersource/dffml_operations_innersource/version.py b/operations/innersource/dffml_operations_innersource/version.py
deleted file mode 100644
index 901e5110b2..0000000000
--- a/operations/innersource/dffml_operations_innersource/version.py
+++ /dev/null
@@ -1 +0,0 @@
-VERSION = "0.0.1"
diff --git a/operations/innersource/entry_points.txt b/operations/innersource/entry_points.txt
deleted file mode 100644
index 6843b583b5..0000000000
--- a/operations/innersource/entry_points.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-[dffml.service.cli]
-innersource = dffml_operations_innersource.cli:InnerSourceCLI
diff --git a/operations/innersource/pyproject.toml b/operations/innersource/pyproject.toml
deleted file mode 100644
index 17b1235941..0000000000
--- a/operations/innersource/pyproject.toml
+++ /dev/null
@@ -1,22 +0,0 @@
-[build-system]
-requires = ["setuptools>=44", "wheel", "setuptools_scm[toml]>=3.4.3"]
-build-backend = "setuptools.build_meta"
-
-[tool.setuptools_scm]
-
-[tool.black]
-exclude = '''
-(
-  /(
-      \.eggs         # exclude a few common directories in the
-    | \.git          # root of the project
-    | \.hg
-    | \.mypy_cache
-    | \.tox
-    | \.venv
-    | _build
-    | buck-out
-    | build
-    | dist
-  )
-)
-'''
diff --git a/operations/innersource/setup.cfg b/operations/innersource/setup.cfg
deleted file mode 100644
index bbe2596188..0000000000
--- a/operations/innersource/setup.cfg
+++ /dev/null
@@ -1,40 +0,0 @@
-[metadata]
-name = dffml-operations-innersource
-version = attr: dffml_operations_innersource.version.VERSION
-description = DFFML Operations for measuring org health
-long_description = file: README.rst
-author = John Andersen
-author_email = johnandersenpdx@gmail.com
-maintainer = John Andersen
-maintainer_email = johnandersenpdx@gmail.com
-url = https://github.com/intel/dffml/blob/master/operations/innersource
-license = MIT
-keywords = dffml
-classifiers =
-    Development Status :: 3 - Alpha
-    Intended Audience :: Developers
-    License :: OSI Approved :: MIT License
-    Natural Language :: English
-    Operating System :: OS Independent
-    Programming Language :: Python :: 3 :: Only
-    Programming Language :: Python :: 3.7
-    Programming Language :: Python :: Implementation :: CPython
-    Programming Language :: Python :: Implementation :: PyPy
-
-[options]
-zip_safe = False
-include_package_data = True
-packages = find:
-entry_points = file: entry_points.txt
-install_requires =
-    dffml>=0.4.0
-    pyyaml>=6.0
-
-[options.extras_require]
-dev =
-    coverage
-    codecov
-    sphinx
-    twine
-    black==19.10b0
-    importlib_metadata>=4.8.1;python_version<"3.8"
diff --git a/operations/innersource/setup.py b/operations/innersource/setup.py
deleted file mode 100644
index 17542f4d0e..0000000000
--- a/operations/innersource/setup.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import sys
-import site
-import setuptools
-
-# See https://github.com/pypa/pip/issues/7953
-site.ENABLE_USER_SITE = "--user" in sys.argv[1:]
-
-setuptools.setup()
diff --git a/operations/innersource/tests/__init__.py b/operations/innersource/tests/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/operations/innersource/tests/test_operations.py b/operations/innersource/tests/test_operations.py
deleted file mode 100644
index cfd08b006f..0000000000
--- a/operations/innersource/tests/test_operations.py
+++ /dev/null
@@ -1,101 +0,0 @@
-import sys
-import copy
-import pathlib
-import itertools
-
-from dffml.df.types import Input, DataFlow
-from dffml.df.base import opimp_in
-from dffml.df.memory import MemoryOrchestrator
-from dffml.operation.output import GetSingle
-from dffml.util.asynctestcase import AsyncTestCase
-
-from dffml_operations_innersource.operations import *
-from dffml_feature_git.feature.operations import (
-    check_if_valid_git_repository_URL,
-    clone_git_repo,
-    cleanup_git_repo,
-)
-
-OPIMPS = opimp_in(sys.modules[__name__])
-
-DFFML_ROOT_DIR = pathlib.Path(__file__).parents[3]
-
-DATAFLOW = DataFlow.auto(
-    *OPIMPS,
-)
-
-
-class TestOperations(AsyncTestCase):
-    async def test_run(self):
-        dataflow = copy.deepcopy(DATAFLOW)
-        # Tell the dataflow to accept repo inputs with an origin of seed (the
-        # default origin for when inputs are added on dataflow start), where
-        # the input definition name is the name of the repo definition.
-        dataflow.flow[github_workflow_present.op.name].inputs["repo"] += [
-            {"seed": [github_workflow_present.op.inputs["repo"].name]},
-        ]
-        # Update flow mappings
-        dataflow.update()
-        await self.assertRunDataFlow(dataflow, {
-            "dffml": (
-                [
-                    Input(
-                        value=github_workflow_present.op.inputs["repo"].spec(
-                            directory=DFFML_ROOT_DIR,
-                        ),
-                        definition=github_workflow_present.op.inputs["repo"],
-                    ),
-                    Input(
-                        value=list(itertools.chain(*[
-                            [
-                                definition.name
-                                for definition in opimp.op.outputs.values()
-                            ]
-                            for opimp in OPIMPS
-                            # Operations whose outputs we don't care to compare
-                            if opimp.op.name not in [
-                                GetSingle.op.name,
-                                clone_git_repo.op.name,
-                                check_if_valid_git_repository_URL.op.name,
-                            ]
-                        ])),
-                        definition=GetSingle.op.inputs["spec"],
-                    ),
-                ],
-                {
-                    github_workflow_present.op.outputs["result"].name: True
-                },
-            )
-        })
-
-    async def test_on_repos(self):
-        dataflow = copy.deepcopy(DATAFLOW)
-        await self.assertRunDataFlow(dataflow, {
-            "dffml": (
-                [
-                    Input(
-                        value="https://github.com/pdxjohnny/httptest",
-                        definition=clone_git_repo.op.inputs["URL"],
-                    ),
-                    Input(
-                        value=list(itertools.chain(*[
-                            [
-                                definition.name
-                                for definition in opimp.op.outputs.values()
-                            ]
-                            for opimp in OPIMPS
-                            # Operations whose outputs we don't care to compare
-                            if opimp.op.name not in [
-                                GetSingle.op.name,
-                                clone_git_repo.op.name,
-                                check_if_valid_git_repository_URL.op.name,
-                            ]
-                        ])),
-                        definition=GetSingle.op.inputs["spec"],
-                    ),
-                ],
-                {
-                    github_workflow_present.op.outputs["result"].name: True,
-                },
-            )
-        })
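-
-# These tests assumed the package was installed in development mode; a
-# plausible invocation from the repository root (not taken from any CI
-# config, shown for illustration only):
-#
-#     pip install -e operations/innersource/
-#     python -m unittest discover -s operations/innersource/tests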