Merge pull request IntegriChain1#14 from norton120/DC-131-constants
Adds Global Constants
norton120 authored Jan 25, 2019
2 parents 2475627 + f5f91da commit 1ed7675
Showing 9 changed files with 54 additions and 31 deletions.
1 change: 1 addition & 0 deletions core/__init__.py
@@ -1,2 +1,3 @@
__project__ = 'core'
__version__ = '0.0.1'

9 changes: 5 additions & 4 deletions core/cli.py
@@ -1,8 +1,9 @@
import click
from git import Repo
from core.helpers import docker, notebook
+ from core.constants import DOCKER_REPO, AWS_ACCOUNT


- DOCKER_REPO = 'ichain/core'

@click.group()
def cli(): # pragma: no cover
@@ -19,7 +20,7 @@ def add(a, b):
@click.argument('env', type=click.Choice(['local']))
def publish(env):
if env == 'local':
- AWS_ACCOUNT_ID = "687531504312"
+ AWS_ACCOUNT_ID = AWS_ACCOUNT
repo = Repo('.')
branch_name = repo.active_branch.name
print("Hang tight building this image can take a while...")
@@ -35,7 +36,7 @@ def publish(env):
@click.argument('env', type=click.Choice(['local']))
def tidy(env):
if env == 'local':
- AWS_ACCOUNT_ID = "687531504312"
+ AWS_ACCOUNT_ID = AWS_ACCOUNT
repo = Repo('.')
branch_name = repo.active_branch.name
full_tag = f'{DOCKER_REPO}:{branch_name}'
@@ -55,4 +56,4 @@ def tidy(env):
def run(env, id, input_contract, output_contract):
notebook_url = notebook.run_transform(env, id, input_contract, output_contract)
print("See your notebook output at:")
- print(notebook_url)
\ No newline at end of file
+ print(notebook_url)
9 changes: 9 additions & 0 deletions core/constants.py
@@ -0,0 +1,9 @@
import os
import yaml
from .helpers.project_root import ProjectRoot

config_file = os.path.join(ProjectRoot().get_path(),'core_project.yaml')
with open(config_file) as _file:
config = yaml.safe_load(_file)
for k, v in config.items():
globals()[k] = v
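
Because the loop above writes every key from core_project.yaml into the module's globals(), each setting becomes an ordinary importable name on core.constants. A minimal usage sketch, with values taken from the YAML defaults added in this commit:

from core.constants import DOCKER_REPO, ENV_BUCKET

# Both names were injected at import time by the globals() loop.
print(DOCKER_REPO)   # e.g. ichain/core
print(ENV_BUCKET)    # e.g. ichain-development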
8 changes: 4 additions & 4 deletions core/contract.py
@@ -3,7 +3,7 @@
import logging
from git import Repo
from core.helpers.s3_naming_helper import S3NamingHelper as s3Name

+ from core.constants import DEV_BUCKET, PROD_BUCKET, UAT_BUCKET

class Contract:
''' The s3 contract is how we structure our data lake.
@@ -28,9 +28,9 @@ class Contract:
SUB-PARTITION - for datasets, the sub-partitions add additional partitioning with additional prefixes
FILENAME - nondescript in the contract
'''
- DEV = 'ichain-development'
- PROD = 'ichain-production'
- UAT = 'ichain-uat'
+ DEV = DEV_BUCKET
+ PROD = PROD_BUCKET
+ UAT = UAT_BUCKET
STATES = ['raw', 'ingest', 'master', 'enhance',
'enrich', 'metrics', 'dimensional']

8 changes: 4 additions & 4 deletions core/helpers/notebook.py
@@ -1,6 +1,6 @@
import papermill as pm
from core.helpers import project_root

+ from core.constants import ENV_BUCKET
root = project_root.ProjectRoot()

def run_transform(env: str, id: int, input_contract: str, output_contract: str) -> str:
@@ -22,10 +22,10 @@ def run_transform(env: str, id: int, input_contract: str, output_contract: str)

# TODO: figure out how else we're going to separate the notebook
def output_path(output_contract: str, transformation_name: str) -> str:
- s3_prefix = "s3://ichain-dev-gluepoc/notebooks"
+ s3_prefix = f"s3://{ENV_BUCKET}/notebooks"
return f"{s3_prefix}/{output_contract}/{transformation_name}.ipynb"

def output_url(output_path: str) -> str:
- s3_prefix = "s3://ichain-dev-gluepoc/notebooks"
+ s3_prefix = f"s3://{ENV_BUCKET}/notebooks"
url_prefix = "http://notebook.integrichain.net/view"
- return output_path.replace(s3_prefix, url_prefix)
\ No newline at end of file
+ return output_path.replace(s3_prefix, url_prefix)
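
For reference, output_path and output_url compose: the first builds the S3 location of the executed notebook, the second rewrites the bucket prefix to the notebook viewer host. With ENV_BUCKET left at its core_project.yaml default of ichain-development, the round trip looks roughly like this:

from core.helpers import notebook

path = notebook.output_path("asdf/1234/merp", "shared.raw.extract")
# -> s3://ichain-development/notebooks/asdf/1234/merp/shared.raw.extract.ipynb
url = notebook.output_url(path)
# -> http://notebook.integrichain.net/view/asdf/1234/merp/shared.raw.extract.ipynb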
12 changes: 12 additions & 0 deletions core_project.yaml
@@ -0,0 +1,12 @@
##TODO: env should ideally set these
ENVIRONMENT: dev
ENV_BUCKET: ichain-development

## constants
DOCKER_REPO: ichain/core
DEV_BUCKET: ichain-development
PROD_BUCKET: ichain-production
UAT_BUCKET: ichain-uat
AWS_ACCOUNT: "687531504312"  # quoted so YAML keeps the account id a string
AWS_REGION: us-east-1
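
The TODO at the top of the file says the environment should ideally set these. One possible shape for that, purely a sketch and not part of this commit, is to let exported environment variables override the YAML defaults inside core/constants.py:

# Hypothetical override layer, not implemented in this commit: an exported
# environment variable wins over the YAML default. Note that os.environ
# values are always strings, unlike some YAML scalars.
for k, v in config.items():
    globals()[k] = os.environ.get(k, v)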

19 changes: 9 additions & 10 deletions tests/integration/test_docker.py
@@ -8,12 +8,12 @@
from docker.errors import ImageNotFound
from core.helpers import docker as core_docker
from botocore.exceptions import ClientError
+ from core.constants import AWS_ACCOUNT, DOCKER_REPO

docker_api_client = docker.APIClient(base_url='unix://var/run/docker.sock')
docker_client = docker.DockerClient(base_url='unix://var/run/docker.sock')
ecr_client = boto3.client('ecr')

- AWS_ACCOUNT_ID = "687531504312"
AWS_BATCH_TEST_JOB_QUEUE = "prod_core"

# Generate a super basic container_overrides object for running the integration test
@@ -43,29 +43,28 @@ def generate_it_test_container_overrides():
# 8. Remove image from your machine

def test_integration_docker():
- REPO_NAME = "ichain/core"
TAG = "it_test"

# 1. Build the image
- full_tag = core_docker.build_image(f"{REPO_NAME}:{TAG}")
+ full_tag = core_docker.build_image(f"{DOCKER_REPO}:{TAG}")
test_image = docker_client.images.get(full_tag)

# 2. Log into ECR
with pytest.raises(ClientError):
core_docker.ecr_login("123456789012")
- core_docker.ecr_login(AWS_ACCOUNT_ID)
+ core_docker.ecr_login(AWS_ACCOUNT)

# 3. Push the image to ECR
- core_docker.register_image(TAG, REPO_NAME, AWS_ACCOUNT_ID)
- ecr_tagged_image_name = core_docker.get_aws_repository(full_tag, AWS_ACCOUNT_ID)
+ core_docker.register_image(TAG, DOCKER_REPO, AWS_ACCOUNT)
+ ecr_tagged_image_name = core_docker.get_aws_repository(full_tag, AWS_ACCOUNT)
test_ecr_image = docker_client.images.get(ecr_tagged_image_name)
assert type(test_ecr_image) is Image

repo_digest = test_ecr_image.attrs['RepoDigests'][0]
digest_sha = repo_digest.split("@")[-1]
ecr_resp = ecr_client.describe_images(
- registryId=AWS_ACCOUNT_ID,
- repositoryName=REPO_NAME,
+ registryId=AWS_ACCOUNT,
+ repositoryName=DOCKER_REPO,
imageIds=[
{
'imageDigest': digest_sha,
@@ -98,9 +97,9 @@ def test_integration_docker():
core_docker.deregister_job_definition_set("it_test_core")

# 7. Remove image from ECR
- core_docker.remove_ecr_image(TAG, REPO_NAME, AWS_ACCOUNT_ID)
+ core_docker.remove_ecr_image(TAG, DOCKER_REPO, AWS_ACCOUNT)

# 8. Remove image from your machine
core_docker.remove_image(full_tag)
with pytest.raises(ImageNotFound):
- docker_client.images.get(full_tag)
\ No newline at end of file
+ docker_client.images.get(full_tag)
8 changes: 4 additions & 4 deletions tests/unit/test_moto_mocking.py
@@ -7,14 +7,14 @@
import boto3
import moto
import pytest

+ from core.constants import AWS_REGION

@moto.mock_s3
def test_s3_is_working():
- conn = boto3.resource('s3', region_name='us-east-1', aws_access_key_id="this_is_not_a_real_id",
+ conn = boto3.resource('s3', region_name=AWS_REGION, aws_access_key_id="this_is_not_a_real_id",
aws_secret_access_key="this_is_not_a_real_key")
conn.create_bucket(Bucket='mybucket')
- client = boto3.client('s3', region_name='us-east-1')
+ client = boto3.client('s3', region_name=AWS_REGION)
client.put_object(Bucket='mybucket', Key='banana', Body='body stuff!')

body = conn.Object('mybucket', 'banana').get()[
@@ -25,6 +25,6 @@ def test_s3_is_working():

@moto.mock_secretsmanager
def test_secretsmanager_is_working():
- conn = boto3.client('secretsmanager', region_name='us-east-1',
+ conn = boto3.client('secretsmanager', region_name=AWS_REGION,
aws_access_key_id="this_is_not_a_real_id", aws_secret_access_key="this_is_not_a_real_key")
conn.create_secret(Name='test_hamburger', SecretString='string')
11 changes: 6 additions & 5 deletions tests/unit/test_notebook.py
@@ -2,6 +2,7 @@
import boto3
import pytest
import tempfile
+ from core.constants import ENV_BUCKET
import os
from io import TextIOWrapper
from core.helpers import notebook
@@ -10,26 +11,26 @@
class Test:
def setup(self):
s3_client = boto3.client('s3')
- s3_client.create_bucket(Bucket="ichain-dev-gluepoc")
+ s3_client.create_bucket(Bucket=ENV_BUCKET)

def test_output_path(self):
self.setup()
output_contract = "asdf/1234/merp"
transformation_name = "shared.raw.extract"
output_path = notebook.output_path(output_contract, transformation_name)
- assert output_path == "s3://ichain-dev-gluepoc/notebooks/asdf/1234/merp/shared.raw.extract.ipynb"
+ assert output_path == f"s3://{ENV_BUCKET}/notebooks/asdf/1234/merp/shared.raw.extract.ipynb"

def test_output_url(self):
self.setup()
- output_path = "s3://ichain-dev-gluepoc/notebooks/asdf/1234/merp/shared.raw.extract.ipynb"
+ output_path = f"s3://{ENV_BUCKET}/notebooks/asdf/1234/merp/shared.raw.extract.ipynb"
output_url = notebook.output_url(output_path)
assert output_url == "http://notebook.integrichain.net/view/asdf/1234/merp/shared.raw.extract.ipynb"

def test_run_transform(self):
self.setup()
s3 = boto3.resource('s3')

- bucket = "ichain-dev-gluepoc"
+ bucket = ENV_BUCKET
key = "notebooks/dev/important_business/raw/extract/shared.raw.extract.ipynb"
notebook_url = notebook.run_transform("dev", 2, "dev/important_business/raw/ftp", "dev/important_business/raw/extract")

@@ -41,4 +42,4 @@ def test_run_transform(self):
# we could possibly create an actual test notebook with known
# expected outputs if we like, but that might be overkill
with open(test_file_location, 'r') as tmp_notebook:
- assert type(tmp_notebook) is TextIOWrapper
\ No newline at end of file
+ assert type(tmp_notebook) is TextIOWrapper
