From 35f205df5269b79f01aa6ea8d0f48335afaa8ebe Mon Sep 17 00:00:00 2001 From: Francisco Rivera Date: Mon, 9 Oct 2023 19:14:00 -0300 Subject: [PATCH] SSH files for private repos (#213) * first approach: copy ssh files and use live containers * raise error message in case of unknown host * tests * fix tests * debugging CI * debugging CI 2 * force a command * tail alone * ubuntu 20.04 * skip auth if we don't have to * reverting debugging stuff * clarify function documentation * project.yaml validations on "project create" * timeout test * Revert "project.yaml validations on "project create"" This reverts commit 38381b413a623f01b3ea3477877762002125cf88. * use a pty to exec the init command + adapt tests * tty true by default * no interactive * no interactive on CI --- .../workflows/tests-unit-and-integration.yml | 4 + leverage/_utils.py | 98 ++++++++++++++----- leverage/container.py | 28 +++++- leverage/logger.py | 5 +- leverage/modules/terraform.py | 24 ++++- tests/test_modules/test_run.py | 5 +- tests/test_modules/test_terraform.py | 57 +++++++++++ 7 files changed, 187 insertions(+), 34 deletions(-) create mode 100644 tests/test_modules/test_terraform.py diff --git a/.github/workflows/tests-unit-and-integration.yml b/.github/workflows/tests-unit-and-integration.yml index e6b0ea9f..ee9aa84a 100644 --- a/.github/workflows/tests-unit-and-integration.yml +++ b/.github/workflows/tests-unit-and-integration.yml @@ -138,6 +138,8 @@ jobs: working-directory: ../theadamproject - name: Test Terraform + env: + LEVERAGE_INTERACTIVE: 0 run: | printf "[INFO] Testing terraform\n" @@ -194,6 +196,8 @@ jobs: working-directory: ../theblairwitchproject - name: Test Testing Reference Architecture + env: + LEVERAGE_INTERACTIVE: 0 run: | printf "[INFO] Initializing layer\n" diff --git a/leverage/_utils.py b/leverage/_utils.py index ca130720..322bc718 100644 --- a/leverage/_utils.py +++ b/leverage/_utils.py @@ -1,12 +1,19 @@ """ General use utilities. """ +import io +import os +import tarfile +from pathlib import Path from subprocess import run from subprocess import PIPE from click.exceptions import Exit +from docker import DockerClient +from docker.models.containers import Container from leverage import logger +from leverage.logger import raw_logger def clean_exception_traceback(exception): @@ -82,25 +89,10 @@ def __exit__(self, *args, **kwargs): class AwsCredsEntryPoint(CustomEntryPoint): """ Fetching AWS credentials by setting the SSO/MFA entrypoints. - This works as a replacement of _prepare_container. """ def __init__(self, container, override_entrypoint=None): - if container.sso_enabled: - container._check_sso_token() - auth_method = f"{container.TF_SSO_ENTRYPOINT} -- " - elif container.mfa_enabled: - auth_method = f"{container.TF_MFA_ENTRYPOINT} -- " - container.environment.update( - { - "AWS_SHARED_CREDENTIALS_FILE": container.environment["AWS_SHARED_CREDENTIALS_FILE"].replace( - "tmp", ".aws" - ), - "AWS_CONFIG_FILE": container.environment["AWS_CONFIG_FILE"].replace("tmp", ".aws"), - } - ) - else: - auth_method = "" + auth_method = container.auth_method() new_entrypoint = f"{auth_method}{container.entrypoint if override_entrypoint is None else override_entrypoint}" super(AwsCredsEntryPoint, self).__init__(container, entrypoint=new_entrypoint) @@ -120,6 +112,33 @@ def __exit__(self, *args, **kwargs): self.container.change_file_ownership(self.container.guest_aws_credentials_dir) +class AwsCredsContainer: + """ + Fetching AWS credentials by setting the SSO/MFA entrypoints on a living container. 
+
+    This flow runs a command on a living container before any other command, leaving your AWS credentials ready
+    for authentication.
+
+    In the case of MFA, the env var tweaks (which happen at .auth_method()) must stay until the end of the block
+    given that the container is reused for more commands.
+    """
+
+    def __init__(self, container: Container, tf_container):
+        self.container = container
+        self.tf_container = tf_container
+
+    def __enter__(self):
+        auth_method = self.tf_container.auth_method()
+        if not auth_method:
+            return
+
+        exit_code, output = self.container.exec_run(auth_method, environment=self.tf_container.environment)
+        raw_logger.info(output.decode("utf-8"))
+
+    def __exit__(self, *args, **kwargs):
+        # now return file ownership of the aws credentials files
+        self.tf_container.change_file_ownership(self.tf_container.guest_aws_credentials_dir)
+
+
 class ExitError(Exit):
     """
     Raise an Exit exception but also print an error description.
@@ -136,13 +155,48 @@ class ContainerSession:
     Useful when you need to keep your container alive to share context between multiple commands.
     """
 
-    def __init__(self, docker_client, container):
+    def __init__(self, docker_client: DockerClient, container_data):
         self.docker_client = docker_client
-        self.container = container
+        self.container_data = container_data
 
-    def __enter__(self):
-        self.docker_client.api.start(self.container)
+    def __enter__(self) -> Container:
+        self.docker_client.api.start(self.container_data)
+        return self.docker_client.containers.get(self.container_data["Id"])
 
     def __exit__(self, exc_type, exc_value, exc_tb):
-        self.docker_client.api.stop(self.container)
-        self.docker_client.api.remove_container(self.container)
+        self.docker_client.api.stop(self.container_data)
+        self.docker_client.api.remove_container(self.container_data)
+
+
+class LiveContainer(ContainerSession):
+    """
+    A container that runs a "do nothing" command indefinitely, so the container stays alive.
+    """
+
+    COMMAND = "tail -f /dev/null"
+
+    def __init__(self, leverage_container, tty=True):
+        with CustomEntryPoint(leverage_container, self.COMMAND):
+            container_data = leverage_container._create_container(tty)
+        super().__init__(leverage_container.client, container_data)
+
+
+def tar_directory(host_dir_path: Path) -> bytes:
+    """
+    Compress a local directory in memory as a tar file and return it as bytes.
+    """
+    bytes_array = io.BytesIO()
+    with tarfile.open(fileobj=bytes_array, mode="w") as tar_handler:
+        # walk through the directory tree
+        for root, dirs, files in os.walk(host_dir_path):
+            for f in files:
+                # and add each file to the tar file
+                file_path = Path(os.path.join(root, f))
+                tar_handler.add(
+                    os.path.join(root, f),
+                    arcname=file_path.relative_to(host_dir_path),
+                )
+
+    bytes_array.seek(0)
+    # return the whole tar file as a byte array
+    return bytes_array.read()
diff --git a/leverage/container.py b/leverage/container.py
index af9f7c2a..16c48555 100644
--- a/leverage/container.py
+++ b/leverage/container.py
@@ -1,6 +1,5 @@
 import json
 import os
-import pwd
 import re
 import webbrowser
 from pathlib import Path
@@ -13,7 +12,7 @@
 from docker import DockerClient
 from docker.errors import APIError, NotFound
 from docker.types import Mount
-from typing import Tuple, Union, List
+from typing import Tuple, Union
 
 from leverage import __toolbox_version__
 from leverage import logger
@@ -36,8 +35,6 @@
     r"(.*)"  # layer
 )
 
-raw_logger = raw_logger()
-
 
 def get_docker_client():
     """Attempt to get a Docker client from the environment configuration.
Halt application otherwise. @@ -554,6 +551,29 @@ def __init__(self, client): logger.debug(f"[bold cyan]Container configuration:[/bold cyan]\n{json.dumps(self.container_config, indent=2)}") + def auth_method(self) -> str: + """ + Return the expected auth method based on the SSO or MFA flags. + + In the case of MFA, we also need to tweak some env variables for AWS credentials. + Once you are done with authentication, remember to revert the env var changes. + """ + if self.sso_enabled: + self._check_sso_token() + return f"{self.TF_SSO_ENTRYPOINT} -- " + elif self.mfa_enabled: + self.environment.update( + { + "AWS_SHARED_CREDENTIALS_FILE": self.environment["AWS_SHARED_CREDENTIALS_FILE"].replace( + "tmp", ".aws" + ), + "AWS_CONFIG_FILE": self.environment["AWS_CONFIG_FILE"].replace("tmp", ".aws"), + } + ) + return f"{self.TF_MFA_ENTRYPOINT} -- " + + return "" + @property def tf_cache_dir(self): return os.getenv("TF_PLUGIN_CACHE_DIR") diff --git a/leverage/logger.py b/leverage/logger.py index f273932b..febd9719 100644 --- a/leverage/logger.py +++ b/leverage/logger.py @@ -165,7 +165,7 @@ def get_tasks_logger(): return logger -def raw_logger(): +def _raw_logger(): """ Provide a raw logger, in case we need to print stuff that already comes formatted (like some container logs). """ @@ -177,3 +177,6 @@ def raw_logger(): logger.addHandler(handler) return logger + + +raw_logger = _raw_logger() diff --git a/leverage/modules/terraform.py b/leverage/modules/terraform.py index ce56e70d..3e7c8294 100644 --- a/leverage/modules/terraform.py +++ b/leverage/modules/terraform.py @@ -1,5 +1,7 @@ +import os import re +import dockerpty import hcl2 import click from click.exceptions import Exit @@ -7,6 +9,7 @@ from leverage import logger from leverage._internals import pass_state from leverage._internals import pass_container +from leverage._utils import tar_directory, AwsCredsContainer, LiveContainer from leverage.container import get_docker_client from leverage.container import TerraformContainer @@ -304,10 +307,25 @@ def _init(tf, args): ] args.append(f"-backend-config={tf.backend_tfvars}") - exit_code = tf.start_in_layer("init", *args) + tf.check_for_layer_location() - if exit_code: - raise Exit(exit_code) + with LiveContainer(tf) as container: + # create the .ssh directory + container.exec_run("mkdir -p /root/.ssh") + # copy the entire ~/.ssh/ folder + tar_bytes = tar_directory(tf.home / ".ssh") + # into /root/.ssh + container.put_archive("/root/.ssh/", tar_bytes) + # correct the owner of the files to match with the docker internal user + container.exec_run("chown root:root -R /root/.ssh/") + + with AwsCredsContainer(container, tf): + dockerpty.exec_command( + client=tf.client.api, + container=container.id, + command="terraform init " + " ".join(args), + interactive=bool(int(os.environ.get("LEVERAGE_INTERACTIVE", 1))), + ) @pass_container diff --git a/tests/test_modules/test_run.py b/tests/test_modules/test_run.py index 6f7178a0..c26a0d57 100644 --- a/tests/test_modules/test_run.py +++ b/tests/test_modules/test_run.py @@ -42,10 +42,7 @@ def test__prepare_tasks_to_run_checks_task_existence(): ), ], ) -def test__prepare_tasks_to_run_handles_bad_arguments(input_task, message, caplog, with_click_context): - _configure_logger(logger=_leverage_logger) - _leverage_logger.propagate = True - +def test__prepare_tasks_to_run_handles_bad_arguments(input_task, message, muted_click_context, propagate_logs, caplog): with pytest.raises(Exit): _prepare_tasks_to_run(module=None, input_tasks=[input_task]) diff --git 
a/tests/test_modules/test_terraform.py b/tests/test_modules/test_terraform.py new file mode 100644 index 00000000..5c530198 --- /dev/null +++ b/tests/test_modules/test_terraform.py @@ -0,0 +1,57 @@ +from unittest.mock import patch, Mock + +import pytest +from click import get_current_context + +from leverage._internals import State +from leverage._utils import AwsCredsContainer +from leverage.container import TerraformContainer +from leverage.modules.terraform import _init +from tests.test_containers import container_fixture_factory + + +@pytest.fixture +def terraform_container(muted_click_context): + tf_container = container_fixture_factory(TerraformContainer) + + # this is required because of the @pass_container decorator + ctx = get_current_context() + state = State() + state.container = tf_container + ctx.obj = state + + # assume we are on a valid location + with patch.object(TerraformContainer, "check_for_layer_location", Mock()): + # assume we have valid credentials + with patch.object(AwsCredsContainer, "__enter__", Mock()): + yield tf_container + + +def test_init(terraform_container): + """ + Test happy path. + """ + live_container = Mock() + with patch("leverage._utils.LiveContainer.__enter__", return_value=live_container): + with patch("dockerpty.exec_command") as mocked_pty: + _init([]) + + assert live_container.exec_run.call_args_list[0].args[0] == "mkdir -p /root/.ssh" + assert live_container.exec_run.call_args_list[1].args[0] == "chown root:root -R /root/.ssh/" + assert ( + mocked_pty.call_args_list[0].kwargs["command"] + == f"terraform init -backend-config=/project/./config/backend.tfvars" + ) + + +def test_init_with_args(terraform_container): + """ + Test tf init with arguments. + """ + with patch("dockerpty.exec_command") as mocked_pty: + _init(["-migrate-state"]) + + assert ( + mocked_pty.call_args_list[0].kwargs["command"] + == f"terraform init -migrate-state -backend-config=/project/./config/backend.tfvars" + )
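
Usage note (illustrative, not part of the diff): the helpers introduced above compose into one reusable flow. The sketch below mirrors the _init implementation in this patch, but it is only a minimal sketch: it assumes a TerraformContainer can be built directly from a Docker client the way the CLI does, and it uses "terraform plan" as a stand-in command.

    import os

    import dockerpty

    from leverage._utils import AwsCredsContainer, LiveContainer, tar_directory
    from leverage.container import TerraformContainer, get_docker_client

    # Assumed setup: the CLI normally builds and validates this container for us.
    tf = TerraformContainer(get_docker_client())
    tf.check_for_layer_location()

    with LiveContainer(tf) as container:
        # Make the host's SSH configuration available inside the container so
        # terraform can fetch modules from private repositories.
        container.exec_run("mkdir -p /root/.ssh")
        container.put_archive("/root/.ssh/", tar_directory(tf.home / ".ssh"))
        container.exec_run("chown root:root -R /root/.ssh/")

        # Authenticate once (SSO/MFA) against the living container, then run the
        # command through a pty so prompts still work when interactivity is enabled.
        with AwsCredsContainer(container, tf):
            dockerpty.exec_command(
                client=tf.client.api,
                container=container.id,
                command="terraform plan",
                interactive=bool(int(os.environ.get("LEVERAGE_INTERACTIVE", 1))),
            )

Keeping a single container alive across the exec_run calls is what lets the copied SSH files and the credentials fetched by the auth entrypoint be shared with the final terraform command, instead of preparing a fresh container for each step.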