From 2bbf5face63536dfac1f17e15bb8f32b23856211 Mon Sep 17 00:00:00 2001 From: Mitch Garnaat Date: Thu, 23 Apr 2015 08:26:16 -0700 Subject: [PATCH 1/7] WIP Commit. Updating to use new GA version of the Lambda API. Also moving from botocore to boto3. Also adding SNS example. No longer using CloudFormation for policies since we only need one and CloudFormation does not yet support managed policies. Haven't updated any tests yet so they will all be failing for now. Also need to update README. --- bin/kappa | 48 ++++---- kappa/aws.py | 15 ++- kappa/context.py | 81 ++++++++----- kappa/event_source.py | 69 +++++++++-- kappa/function.py | 55 ++++++--- kappa/policy.py | 87 ++++++++++++++ kappa/role.py | 99 ++++++++++++++++ kappa/stack.py | 143 ----------------------- requirements.txt | 4 +- samples/kinesis/ProcessKinesisRecords.js | 17 +-- samples/kinesis/config.yml | 11 +- samples/kinesis/input.json | 4 +- samples/sns/LambdaSNSSamplePolicy.json | 11 ++ samples/sns/config.yml | 32 +++++ samples/sns/dynamodb_table.json | 49 ++++++++ samples/sns/messageStore.js | 16 +++ samples/sns/resources.json | 38 ++++++ setup.py | 4 +- 18 files changed, 540 insertions(+), 243 deletions(-) create mode 100644 kappa/policy.py create mode 100644 kappa/role.py delete mode 100644 kappa/stack.py create mode 100644 samples/sns/LambdaSNSSamplePolicy.json create mode 100644 samples/sns/config.yml create mode 100644 samples/sns/dynamodb_table.json create mode 100644 samples/sns/messageStore.js create mode 100644 samples/sns/resources.json diff --git a/bin/kappa b/bin/kappa index 6e04848..2673bd7 100755 --- a/bin/kappa +++ b/bin/kappa @@ -13,6 +13,7 @@ # language governing permissions and limitations under the License. 
from datetime import datetime import logging +import base64 import click @@ -38,18 +39,20 @@ def cli(ctx, config=None, debug=False): @cli.command() @click.pass_context -def deploy(ctx): +def create(ctx): context = Context(ctx.obj['config'], ctx.obj['debug']) click.echo('deploying...') - context.deploy() + context.create() click.echo('...done') @cli.command() @click.pass_context -def test(ctx): +def invoke(ctx): context = Context(ctx.obj['config'], ctx.obj['debug']) - click.echo('testing...') - context.test() + click.echo('invoking...') + response = context.invoke() + log_data = base64.b64decode(response['LogResult']) + click.echo(log_data) click.echo('...done') @cli.command() @@ -67,31 +70,32 @@ def tail(ctx): def status(ctx): context = Context(ctx.obj['config'], ctx.obj['debug']) status = context.status() - click.echo(click.style('Stack', bold=True)) - if status['stack']: - for stack in status['stack']['Stacks']: - line = ' {}: {}'.format(stack['StackId'], stack['StackStatus']) - click.echo(click.style(line, fg='green')) - else: - click.echo(click.style(' None', fg='green')) + click.echo(click.style('Policy', bold=True)) + if status['policy']: + line = ' {} ({})'.format( + status['policy']['PolicyName'], + status['policy']['Arn']) + click.echo(click.style(line, fg='green')) + click.echo(click.style('Role', bold=True)) + if status['role']: + line = ' {} ({})'.format( + status['role']['Role']['RoleName'], + status['role']['Role']['Arn']) + click.echo(click.style(line, fg='green')) click.echo(click.style('Function', bold=True)) if status['function']: - line = ' {}'.format( - status['function']['Configuration']['FunctionName']) + line = ' {} ({})'.format( + status['function']['Configuration']['FunctionName'], + status['function']['Configuration']['FunctionArn']) click.echo(click.style(line, fg='green')) else: click.echo(click.style(' None', fg='green')) click.echo(click.style('Event Sources', bold=True)) if status['event_sources']: for event_source in 
status['event_sources']: - if 'EventSource' in event_source: - line = ' {}: {}'.format( - event_source['EventSource'], event_source['IsActive']) - click.echo(click.style(line, fg='green')) - else: - line = ' {}'.format( - event_source['CloudFunctionConfiguration']['Id']) - click.echo(click.style(line, fg='green')) + line = ' {}: {}'.format( + event_source['EventSourceArn'], event_source['State']) + click.echo(click.style(line, fg='green')) else: click.echo(click.style(' None', fg='green')) diff --git a/kappa/aws.py b/kappa/aws.py index 1ae7143..65a6986 100644 --- a/kappa/aws.py +++ b/kappa/aws.py @@ -1,4 +1,4 @@ -# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/ +# Copyright (c) 2014,2015 Mitch Garnaat http://garnaat.org/ # # Licensed under the Apache License, Version 2.0 (the "License"). You # may not use this file except in compliance with the License. A copy of @@ -11,21 +11,20 @@ # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
-import botocore.session +import boto3 class __AWS(object): - def __init__(self, profile=None, region=None): + def __init__(self, profile_name=None, region_name=None): self._client_cache = {} - self._session = botocore.session.get_session() - self._session.profile = profile - self._region = region + self._session = boto3.session.Session( + region_name=region_name, profile_name=profile_name) def create_client(self, client_name): if client_name not in self._client_cache: - self._client_cache[client_name] = self._session.create_client( - client_name, self._region) + self._client_cache[client_name] = self._session.client( + client_name) return self._client_cache[client_name] diff --git a/kappa/context.py b/kappa/context.py index ba4b064..d1a42d1 100644 --- a/kappa/context.py +++ b/kappa/context.py @@ -16,7 +16,8 @@ import kappa.function import kappa.event_source -import kappa.stack +import kappa.policy +import kappa.role LOG = logging.getLogger(__name__) @@ -32,8 +33,16 @@ def __init__(self, config_file, debug=False): else: self.set_logger('kappa', logging.INFO) self.config = yaml.load(config_file) - self._stack = kappa.stack.Stack( - self, self.config['cloudformation']) + if 'policy' in self.config.get('iam', ''): + self.policy = kappa.policy.Policy( + self, self.config['iam']['policy']) + else: + self.policy = None + if 'role' in self.config.get('iam', ''): + self.role = kappa.role.Role( + self, self.config['iam']['role']) + else: + self.role = None self.function = kappa.function.Function( self, self.config['lambda']) self.event_sources = [] @@ -57,11 +66,7 @@ def lambda_config(self): @property def exec_role_arn(self): - return self._stack.exec_role_arn - - @property - def invoke_role_arn(self): - return self._stack.invoke_role_arn + return self.role.arn def debug(self): self.set_logger('kappa', logging.DEBUG) @@ -90,44 +95,64 @@ def set_logger(self, logger_name, level=logging.INFO): log.addHandler(ch) def _create_event_sources(self): - for event_source_cfg in 
self.config['lambda']['event_sources']: - _, _, svc, _ = event_source_cfg['arn'].split(':', 3) - if svc == 'kinesis': - self.event_sources.append( - kappa.event_source.KinesisEventSource( + if 'event_sources' in self.config['lambda']: + for event_source_cfg in self.config['lambda']['event_sources']: + _, _, svc, _ = event_source_cfg['arn'].split(':', 3) + if svc == 'kinesis': + self.event_sources.append( + kappa.event_source.KinesisEventSource( + self, event_source_cfg)) + elif svc == 's3': + self.event_sources.append(kappa.event_source.S3EventSource( self, event_source_cfg)) - elif svc == 's3': - self.event_sources.append(kappa.event_source.S3EventSource( - self, event_source_cfg)) - else: - msg = 'Unsupported event source: %s' % event_source_cfg['arn'] - raise ValueError(msg) + elif svc == 'sns': + self.event_sources.append( + kappa.event_source.SNSEventSource(self, + event_source_cfg)) + else: + msg = 'Unknown event source: %s' % event_source_cfg['arn'] + raise ValueError(msg) def add_event_sources(self): for event_source in self.event_sources: event_source.add(self.function) - def deploy(self): - self._stack.update() - self.function.upload() + def create(self): + if self.policy: + self.policy.create() + if self.role: + self.role.create() + self.function.create() - def test(self): - self.function.test() + def invoke(self): + return self.function.invoke() def tail(self): return self.function.tail() def delete(self): - self._stack.delete() + if self.policy: + self.policy.delete() + if self.role: + self.role.delete() self.function.delete() for event_source in self.event_sources: event_source.remove(self.function) def status(self): status = {} - status['stack'] = self._stack.status() + if self.policy: + status['policy'] = self.policy.status() + else: + status['policy'] = None + if self.role: + status['role'] = self.role.status() + else: + status['role'] = None status['function'] = self.function.status() status['event_sources'] = [] - for event_source in 
self.event_sources: - status['event_sources'].append(event_source.status(self.function)) + if self.event_sources: + for event_source in self.event_sources: + status['event_sources'].append( + event_source.status(self.function)) return status diff --git a/kappa/event_source.py b/kappa/event_source.py index 89302cf..9b81a4d 100644 --- a/kappa/event_source.py +++ b/kappa/event_source.py @@ -30,6 +30,10 @@ def __init__(self, context, config): def arn(self): return self._config['arn'] + @property + def starting_position(self): + return self._config.get('starting_position', 'TRIM_HORIZON') + @property def batch_size(self): return self._config.get('batch_size', 100) @@ -44,21 +48,21 @@ def __init__(self, context, config): def _get_uuid(self, function): uuid = None - response = self._lambda.list_event_sources( + response = self._lambda.list_event_source_mappings( FunctionName=function.name, EventSourceArn=self.arn) LOG.debug(response) - if len(response['EventSources']) > 0: - uuid = response['EventSources'][0]['UUID'] + if len(response['EventSourceMappings']) > 0: + uuid = response['EventSourceMappings'][0]['UUID'] return uuid def add(self, function): try: - response = self._lambda.add_event_source( + response = self._lambda.create_event_source_mapping( FunctionName=function.name, - Role=self._context.invoke_role_arn, - EventSource=self.arn, - BatchSize=self.batch_size) + EventSourceArn=self.arn, + BatchSize=self.batch_size, + StartingPosition=self.starting_position) LOG.debug(response) except Exception: LOG.exception('Unable to add Kinesis event source') @@ -67,7 +71,7 @@ def remove(self, function): response = None uuid = self._get_uuid(function) if uuid: - response = self._lambda.remove_event_source( + response = self._lambda.delete_event_source_mapping( UUID=uuid) LOG.debug(response) return response @@ -75,7 +79,7 @@ def remove(self, function): def status(self, function): LOG.debug('getting status for event source %s', self.arn) try: - response = 
self._lambda.get_event_source( + response = self._lambda.get_event_source_mapping( UUID=self._get_uuid(function)) LOG.debug(response) except ClientError: @@ -134,3 +138,50 @@ def status(self, function): if 'CloudFunctionConfiguration' not in response: response = None return response + + +class SNSEventSource(EventSource): + + def __init__(self, context, config): + super(SNSEventSource, self).__init__(context, config) + aws = kappa.aws.get_aws(context) + self._sns = aws.create_client('sns') + + def _make_notification_id(self, function_name): + return 'Kappa-%s-notification' % function_name + + def exists(self, function): + try: + response = self._sns.list_subscriptions_by_topic( + TopicArn=self.arn) + LOG.debug(response) + for subscription in response['Subscriptions']: + if subscription['Endpoint'] == function.arn: + return subscription + return None + except Exception: + LOG.exception('Unable to find event source %s', self.arn) + + def add(self, function): + try: + response = self._sns.subscribe( + TopicArn=self.arn, Protocol='lambda', + Endpoint=function.arn) + LOG.debug(response) + except Exception: + LOG.exception('Unable to add SNS event source') + + def remove(self, function): + LOG.debug('removing SNS event source') + try: + subscription = self.exists(function) + if subscription: + response = self._sns.unsubscribe( + SubscriptionArn=subscription['SubscriptionArn']) + LOG.debug(response) + except Exception: + LOG.exception('Unable to remove event source %s', self.arn) + + def status(self, function): + LOG.debug('status for SNS notification for %s', function.name) + return self.exists(function) diff --git a/kappa/function.py index 2c6ac22..3c409d5 100644 --- a/kappa/function.py +++ b/kappa/function.py @@ -45,10 +45,6 @@ def runtime(self): def handler(self): return self._config['handler'] - @property - def mode(self): - return self._config['mode'] - @property def description(self): return self._config['description'] @@ -73,14 +69,18 @@ def 
path(self): def test_data(self): return self._config['test_data'] + @property + def permissions(self): + return self._config.get('permissions', list()) + @property def arn(self): if self._arn is None: try: - response = self._lambda_svc.get_function_configuration( + response = self._lambda_svc.get_function( FunctionName=self.name) LOG.debug(response) - self._arn = response['FunctionARN'] + self._arn = response['Configuration']['FunctionArn'] except Exception: LOG.debug('Unable to find ARN for function: %s', self.name) return self._arn @@ -124,25 +124,45 @@ def zip_lambda_function(self, zipfile_name, lambda_fn): else: self._zip_lambda_file(zipfile_name, lambda_fn) - def upload(self): - LOG.debug('uploading %s', self.zipfile_name) + def add_permissions(self): + for permission in self.permissions: + try: + kwargs = { + 'FunctionName': self.name, + 'StatementId': permission['statement_id'], + 'Action': permission['action'], + 'Principal': permission['principal']} + source_arn = permission.get('source_arn', None) + if source_arn: + kwargs['SourceArn'] = source_arn + source_account = permission.get('source_account', None) + if source_account: + kwargs['SourceAccount'] = source_account + response = self._lambda_svc.add_permission(**kwargs) + LOG.debug(response) + except Exception: + LOG.exception('Unable to add permission') + + def create(self): + LOG.debug('creating %s', self.zipfile_name) self.zip_lambda_function(self.zipfile_name, self.path) with open(self.zipfile_name, 'rb') as fp: exec_role = self._context.exec_role_arn try: - response = self._lambda_svc.upload_function( + zipdata = fp.read() + response = self._lambda_svc.create_function( FunctionName=self.name, - FunctionZip=fp, + Code={'ZipFile': zipdata}, Runtime=self.runtime, Role=exec_role, Handler=self.handler, - Mode=self.mode, Description=self.description, Timeout=self.timeout, MemorySize=self.memory_size) LOG.debug(response) except Exception: LOG.exception('Unable to upload zip file') + self.add_permissions() 
def delete(self): LOG.debug('deleting function %s', self.name) @@ -169,5 +189,14 @@ def invoke_asynch(self, data_file): InvokeArgs=fp) LOG.debug(response) - def test(self): - self.invoke_asynch(self.test_data) + def invoke(self, test_data=None): + if test_data is None: + test_data = self.test_data + LOG.debug('invoke %s', test_data) + with open(test_data) as fp: + response = self._lambda_svc.invoke( + FunctionName=self.name, + LogType='Tail', + Payload=fp.read()) + LOG.debug(response) + return response diff --git a/kappa/policy.py b/kappa/policy.py new file mode 100644 index 0000000..76b38b3 --- /dev/null +++ b/kappa/policy.py @@ -0,0 +1,87 @@ +# Copyright (c) 2015 Mitch Garnaat http://garnaat.org/ +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. 
+ +import logging + +from botocore.exceptions import ClientError + +import kappa.aws + +LOG = logging.getLogger(__name__) + + +class Policy(object): + + Path = '/kappa/' + + def __init__(self, context, config): + self._context = context + self._config = config + aws = kappa.aws.get_aws(context) + self._iam_svc = aws.create_client('iam') + self._arn = None + + @property + def name(self): + return self._config['name'] + + @property + def description(self): + return self._config.get('description', None) + + @property + def document(self): + return self._config['document'] + + @property + def arn(self): + if self._arn is None: + policy = self.exists() + if policy: + self._arn = policy.get('Arn', None) + return self._arn + + def exists(self): + try: + response = self._iam_svc.list_policies(PathPrefix=self.Path) + LOG.debug(response) + for policy in response['Policies']: + if policy['PolicyName'] == self.name: + return policy + except Exception: + LOG.exception('Error listing policies') + return None + + def create(self): + LOG.debug('creating policy %s', self.name) + policy = self.exists() + if not policy: + with open(self.document, 'rb') as fp: + try: + response = self._iam_svc.create_policy( + Path=self.Path, PolicyName=self.name, + PolicyDocument=fp.read(), + Description=self.description) + LOG.debug(response) + except Exception: + LOG.exception('Error creating Policy') + + def delete(self): + LOG.debug('deleting policy %s', self.name) + response = self._iam_svc.delete_policy(PolicyArn=self.arn) + LOG.debug(response) + return response + + def status(self): + LOG.debug('getting status for policy %s', self.name) + return self.exists() diff --git a/kappa/role.py b/kappa/role.py new file mode 100644 index 0000000..f700fd5 --- /dev/null +++ b/kappa/role.py @@ -0,0 +1,99 @@ +# Copyright (c) 2015 Mitch Garnaat http://garnaat.org/ +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. 
A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import logging + +from botocore.exceptions import ClientError + +import kappa.aws + +LOG = logging.getLogger(__name__) + + +AssumeRolePolicyDocument = """{ + "Version" : "2012-10-17", + "Statement": [ { + "Effect": "Allow", + "Principal": { + "Service": [ "lambda.amazonaws.com" ] + }, + "Action": [ "sts:AssumeRole" ] + } ] +}""" + + +class Role(object): + + Path = '/kappa/' + + def __init__(self, context, config): + self._context = context + self._config = config + aws = kappa.aws.get_aws(context) + self._iam_svc = aws.create_client('iam') + self._arn = None + + @property + def name(self): + return self._config['name'] + + @property + def arn(self): + if self._arn is None: + try: + response = self._iam_svc.get_role( + RoleName=self.name) + LOG.debug(response) + self._arn = response['Role']['Arn'] + except Exception: + LOG.debug('Unable to find ARN for role: %s', self.name) + return self._arn + + def exists(self): + try: + response = self._iam_svc.list_roles(PathPrefix=self.Path) + LOG.debug(response) + for role in response['Roles']: + if role['RoleName'] == self.name: + return role + except Exception: + LOG.exception('Error listing roles') + return None + + def create(self): + LOG.debug('creating role %s', self.name) + role = self.exists() + if not role: + try: + response = self._iam_svc.create_role( + Path=self.Path, RoleName=self.name, + AssumeRolePolicyDocument=AssumeRolePolicyDocument) + LOG.debug(response) + except Exception: + LOG.exception('Error creating Role') + + def delete(self): + LOG.debug('deleting role %s', self.name) + response = self._iam_svc.delete_role(RoleName=self.name) + 
LOG.debug(response) + return response + + def status(self): + LOG.debug('getting status for role %s', self.name) + try: + response = self._iam_svc.get_role(RoleName=self.name) + LOG.debug(response) + except ClientError: + LOG.debug('role %s not found', self.name) + response = None + return response diff --git a/kappa/stack.py b/kappa/stack.py deleted file mode 100644 index e71fb90..0000000 --- a/kappa/stack.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/ -# -# Licensed under the Apache License, Version 2.0 (the "License"). You -# may not use this file except in compliance with the License. A copy of -# the License is located at -# -# http://aws.amazon.com/apache2.0/ -# -# or in the "license" file accompanying this file. This file is -# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF -# ANY KIND, either express or implied. See the License for the specific -# language governing permissions and limitations under the License. - -import logging -import time - -import kappa.aws - -LOG = logging.getLogger(__name__) - - -class Stack(object): - - completed_states = ('CREATE_COMPLETE', 'UPDATE_COMPLETE') - failed_states = ('UPDATE_ROLLBACK_COMPLETE', 'ROLLBACK_COMPLETE') - - def __init__(self, context, config): - self._context = context - self._config = config - aws = kappa.aws.get_aws(self._context) - self._cfn = aws.create_client('cloudformation') - self._iam = aws.create_client('iam') - - @property - def name(self): - return self._config['stack_name'] - - @property - def template_path(self): - return self._config['template'] - - @property - def exec_role(self): - return self._config['exec_role'] - - @property - def exec_role_arn(self): - return self._get_role_arn(self.exec_role) - - @property - def invoke_role(self): - return self._config['invoke_role'] - - @property - def invoke_role_arn(self): - return self._get_role_arn(self.invoke_role) - - def _get_role_arn(self, role_name): - role_arn = None - try: - 
resources = self._cfn.list_stack_resources( - StackName=self.name) - LOG.debug(resources) - except Exception: - LOG.exception('Unable to find role ARN: %s', role_name) - for resource in resources['StackResourceSummaries']: - if resource['LogicalResourceId'] == role_name: - role = self._iam.get_role( - RoleName=resource['PhysicalResourceId']) - LOG.debug(role) - role_arn = role['Role']['Arn'] - LOG.debug('role_arn: %s', role_arn) - return role_arn - - def exists(self): - """ - Does Cloudformation Stack already exist? - """ - try: - response = self._cfn.describe_stacks(StackName=self.name) - LOG.debug('Stack %s exists', self.name) - except Exception: - LOG.debug('Stack %s does not exist', self.name) - response = None - return response - - def wait(self): - done = False - while not done: - time.sleep(1) - response = self._cfn.describe_stacks(StackName=self.name) - LOG.debug(response) - status = response['Stacks'][0]['StackStatus'] - LOG.debug('Stack status is: %s', status) - if status in self.completed_states: - done = True - if status in self.failed_states: - msg = 'Could not create stack %s: %s' % (self.name, status) - raise ValueError(msg) - - def _create(self): - LOG.debug('create_stack: stack_name=%s', self.name) - template_body = open(self.template_path).read() - try: - response = self._cfn.create_stack( - StackName=self.name, TemplateBody=template_body, - Capabilities=['CAPABILITY_IAM']) - LOG.debug(response) - except Exception: - LOG.exception('Unable to create stack') - self.wait() - - def _update(self): - LOG.debug('create_stack: stack_name=%s', self.name) - template_body = open(self.template_path).read() - try: - response = self._cfn.update_stack( - StackName=self.name, TemplateBody=template_body, - Capabilities=['CAPABILITY_IAM']) - LOG.debug(response) - except Exception as e: - if 'ValidationError' in str(e): - LOG.info('No Updates Required') - else: - LOG.exception('Unable to update stack') - self.wait() - - def update(self): - if self.exists(): - 
self._update() - else: - self._create() - - def status(self): - return self.exists() - - def delete(self): - LOG.debug('delete_stack: stack_name=%s', self.name) - try: - response = self._cfn.delete_stack(StackName=self.name) - LOG.debug(response) - except Exception: - LOG.exception('Unable to delete stack: %s', self.name) diff --git a/requirements.txt b/requirements.txt index 1a0a7a9..868c33d 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -botocore==0.94.0 -click==3.3 +boto3==0.0.15 +click==4.0 PyYAML>=3.11 mock>=1.0.1 nose==1.3.1 diff --git a/samples/kinesis/ProcessKinesisRecords.js b/samples/kinesis/ProcessKinesisRecords.js index 6d833e7..4b200c6 100644 --- a/samples/kinesis/ProcessKinesisRecords.js +++ b/samples/kinesis/ProcessKinesisRecords.js @@ -1,10 +1,11 @@ -console.log('Loading event'); +console.log('Loading function'); + exports.handler = function(event, context) { - console.log(JSON.stringify(event, null, ' ')); - for(i = 0; i < event.Records.length; ++i) { - encodedPayload = event.Records[i].kinesis.data; - payload = new Buffer(encodedPayload, 'base64').toString('ascii'); - console.log("Decoded payload: " + payload); - } - context.done(null, "Hello World"); // SUCCESS with message + console.log(JSON.stringify(event, null, 2)); + event.Records.forEach(function(record) { + // Kinesis data is base64 encoded so decode here + payload = new Buffer(record.kinesis.data, 'base64').toString('ascii'); + console.log('Decoded payload:', payload); + }); + context.succeed(); }; diff --git a/samples/kinesis/config.yml b/samples/kinesis/config.yml index 9c2a828..edca66e 100644 --- a/samples/kinesis/config.yml +++ b/samples/kinesis/config.yml @@ -1,11 +1,9 @@ --- profile: personal region: us-east-1 -cloudformation: - template: roles.cf - stack_name: TestKinesis - exec_role: ExecRole - invoke_role: InvokeRole +iam: + role_name: KinesisSampleRole + role_policy: AWSLambdaKinesisExecutionRole lambda: name: KinesisSample zipfile_name: KinesisSample.zip 
@@ -15,9 +13,10 @@ lambda: runtime: nodejs memory_size: 128 timeout: 3 - mode: event event_sources: - arn: arn:aws:kinesis:us-east-1:084307701560:stream/lambdastream + starting_position: TRIM_HORIZON + batch_size: 100 test_data: input.json \ No newline at end of file diff --git a/samples/kinesis/input.json b/samples/kinesis/input.json index 60341d9..fe2f9ef 100644 --- a/samples/kinesis/input.json +++ b/samples/kinesis/input.json @@ -12,8 +12,8 @@ "invokeIdentityArn": "arn:aws:iam::059493405231:role/testLEBRole", "eventVersion": "1.0", "eventName": "aws:kinesis:record", - "eventSourceARN": "arn:aws:kinesis:us-east-1:35667example:stream/examplestream", - "awsRegion": "us-east-1" + "eventSourceARN": "arn:aws:kinesis:us-west-2:35667example:stream/examplestream", + "awsRegion": "us-west-2" } ] } diff --git a/samples/sns/LambdaSNSSamplePolicy.json b/samples/sns/LambdaSNSSamplePolicy.json new file mode 100644 index 0000000..9d98958 --- /dev/null +++ b/samples/sns/LambdaSNSSamplePolicy.json @@ -0,0 +1,11 @@ +{ + "Version": "2012-10-17", + "Statement":[ + { + "Sid":"Stmt1428510662000", + "Effect":"Allow", + "Action":["dynamodb:*"], + "Resource":["arn:aws:dynamodb:us-east-1:084307701560:table/snslambda"] + } + ] +} diff --git a/samples/sns/config.yml b/samples/sns/config.yml new file mode 100644 index 0000000..ee3648d --- /dev/null +++ b/samples/sns/config.yml @@ -0,0 +1,32 @@ +--- +profile: personal +region: us-east-1 +resources: resources.json +iam: + policy: + description: A policy used with the Kappa SNS->DynamoDB example + name: LambdaSNSSamplePolicy + document: LambdaSNSSamplePolicy.json + role: + name: SNSSampleRole + policy: LambdaSNSSamplePolicy +lambda: + name: SNSSample + zipfile_name: SNSSample.zip + description: Testing SNS -> DynamoDB Lambda handler + path: messageStore.js + handler: messageStore.handler + runtime: nodejs + memory_size: 128 + timeout: 3 + permissions: + - + statement_id: sns_invoke + action: lambda:invokeFunction + principal: sns.amazonaws.com 
+ source_arn: arn:aws:sns:us-east-1:084307701560:lambda_topic + event_sources: + - + arn: arn:aws:sns:us-east-1:084307701560:lambda_topic + test_data: input.json + \ No newline at end of file diff --git a/samples/sns/dynamodb_table.json b/samples/sns/dynamodb_table.json new file mode 100644 index 0000000..3ffe747 --- /dev/null +++ b/samples/sns/dynamodb_table.json @@ -0,0 +1,49 @@ +{ + "TableName": "snslambda", + "AttributeDefinitions": [ + { + "AttributeName": "SnsTopicArn", + "AttributeType": "S" + }, + { + "AttributeName": "SnsPublishTime", + "AttributeType": "S" + }, + { + "AttributeName": "SnsMessageId", + "AttributeType": "S" + } + ], + "KeySchema": [ + { + "AttributeName": "SnsTopicArn", + "KeyType": "HASH" + }, + { + "AttributeName": "SnsPublishTime", + "KeyType": "RANGE" + } + ], + "GlobalSecondaryIndexes": [ + { + "IndexName": "MesssageIndex", + "KeySchema": [ + { + "AttributeName": "SnsMessageId", + "KeyType": "HASH" + } + ], + "Projection": { + "ProjectionType": "ALL" + }, + "ProvisionedThroughput": { + "ReadCapacityUnits": 5, + "WriteCapacityUnits": 1 + } + } + ], + "ProvisionedThroughput": { + "ReadCapacityUnits": 5, + "WriteCapacityUnits": 5 + } +} diff --git a/samples/sns/messageStore.js b/samples/sns/messageStore.js new file mode 100644 index 0000000..1947016 --- /dev/null +++ b/samples/sns/messageStore.js @@ -0,0 +1,16 @@ +console.log('Loading event'); +var aws = require('aws-sdk'); +var ddb = new aws.DynamoDB({params: {TableName: 'snslambda'}}); + +exports.handler = function(event, context) { + var SnsMessageId = event.Records[0].Sns.MessageId; + var SnsPublishTime = event.Records[0].Sns.Timestamp; + var SnsTopicArn = event.Records[0].Sns.TopicArn; + var LambdaReceiveTime = new Date().toString(); + var itemParams = {Item: {SnsTopicArn: {S: SnsTopicArn}, + SnsPublishTime: {S: SnsPublishTime}, SnsMessageId: {S: SnsMessageId}, + LambdaReceiveTime: {S: LambdaReceiveTime} }}; + ddb.putItem(itemParams, function() { + context.done(null,''); + }); +}; 
diff --git a/samples/sns/resources.json b/samples/sns/resources.json new file mode 100644 index 0000000..06f7e75 --- /dev/null +++ b/samples/sns/resources.json @@ -0,0 +1,38 @@ +{ + "AWSTemplateFormatVersion" : "2010-09-09", + + "Description" : "Creates the DynamoDB Table needed for the example", + + "Resources" : { + "snslambda" : { + "Type" : "AWS::DynamoDB::Table", + "Properties" : { + "AttributeDefinitions": [ + { + "AttributeName" : "SnsTopicArn", + "AttributeType" : "S" + }, + { + "AttributeName" : "SnsPublishTime", + "AttributeType" : "S" + } + ], + "KeySchema": [ + { "AttributeName": "SnsTopicArn", "KeyType": "HASH" }, + { "AttributeName": "SnsPublishTime", "KeyType": "RANGE" } + ], + "ProvisionedThroughput" : { + "ReadCapacityUnits" : 5, + "WriteCapacityUnits" : 5 + } + } + } + }, + + "Outputs" : { + "TableName" : { + "Value" : {"Ref" : "snslambda"}, + "Description" : "Table name of the newly created DynamoDB table" + } + } +} diff --git a/setup.py b/setup.py index 2d4ebf9..e0f8028 100755 --- a/setup.py +++ b/setup.py @@ -5,8 +5,8 @@ import os requires = [ - 'botocore==0.94.0', - 'click==3.3', + 'boto3==0.0.15', + 'click==4.0', 'PyYAML>=3.11' ] From 405523c215c1edcc4622952f2f64fae9f9e66287 Mon Sep 17 00:00:00 2001 From: Mitch Garnaat Date: Sun, 26 Apr 2015 10:10:19 -0700 Subject: [PATCH 2/7] Another WIP commit on the road to an update for the new Lambda API. 
--- bin/kappa | 21 +++++++++++++++------ kappa/context.py | 29 ++++++++++++++++++++++------- kappa/event_source.py | 24 +++++++++++++++++------- kappa/function.py | 22 ++++++++++++++++++++-- kappa/policy.py | 8 +++++--- kappa/role.py | 21 ++++++++++++++++++--- 6 files changed, 97 insertions(+), 28 deletions(-) diff --git a/bin/kappa b/bin/kappa index 2673bd7..e8037b0 100755 --- a/bin/kappa +++ b/bin/kappa @@ -41,10 +41,18 @@ def cli(ctx, config=None, debug=False): @click.pass_context def create(ctx): context = Context(ctx.obj['config'], ctx.obj['debug']) - click.echo('deploying...') + click.echo('creating...') context.create() click.echo('...done') +@cli.command() +@click.pass_context +def update_code(ctx): + context = Context(ctx.obj['config'], ctx.obj['debug']) + click.echo('updating code...') + context.update_code() + click.echo('...done') + @cli.command() @click.pass_context def invoke(ctx): @@ -93,11 +101,12 @@ def status(ctx): click.echo(click.style('Event Sources', bold=True)) if status['event_sources']: for event_source in status['event_sources']: - line = ' {}: {}'.format( - event_source['EventSourceArn'], event_source['State']) - click.echo(click.style(line, fg='green')) - else: - click.echo(click.style(' None', fg='green')) + if event_source: + line = ' {}: {}'.format( + event_source['EventSourceArn'], event_source['State']) + click.echo(click.style(line, fg='green')) + else: + click.echo(click.style(' None', fg='green')) @cli.command() @click.pass_context diff --git a/kappa/context.py b/kappa/context.py index d1a42d1..45df69a 100644 --- a/kappa/context.py +++ b/kappa/context.py @@ -13,6 +13,7 @@ import logging import yaml +import time import kappa.function import kappa.event_source @@ -107,8 +108,12 @@ def _create_event_sources(self): self, event_source_cfg)) elif svc == 'sns': self.event_sources.append( - kappa.event_source.SNSEventSource(self, - event_source_cfg)) + kappa.event_source.SNSEventSource( + self, event_source_cfg)) + elif svc == 
'dynamodb': + self.event_sources.append( + kappa.event_source.DynamoDBStreamEventSource( + self, event_source_cfg)) else: msg = 'Unknown event source: %s' % event_source_cfg['arn'] raise ValueError(msg) @@ -122,8 +127,16 @@ def create(self): self.policy.create() if self.role: self.role.create() + # There is a consistency problem here. + # If you don't wait for a bit, the function.create call + # will fail because the policy has not been attached to the role. + LOG.debug('Waiting for policy/role propogation') + time.sleep(5) self.function.create() + def update_code(self): + self.function.update() + def invoke(self): return self.function.invoke() @@ -131,13 +144,15 @@ def tail(self): return self.function.tail() def delete(self): - if self.policy: - self.policy.delete() - if self.role: - self.role.delete() - self.function.delete() for event_source in self.event_sources: event_source.remove(self.function) + self.function.delete() + time.sleep(5) + if self.role: + self.role.delete() + time.sleep(5) + if self.policy: + self.policy.delete() def status(self): status = {} diff --git a/kappa/event_source.py b/kappa/event_source.py index 9b81a4d..207de81 100644 --- a/kappa/event_source.py +++ b/kappa/event_source.py @@ -77,17 +77,27 @@ def remove(self, function): return response def status(self, function): + response = None LOG.debug('getting status for event source %s', self.arn) - try: - response = self._lambda.get_event_source_mapping( - UUID=self._get_uuid(function)) - LOG.debug(response) - except ClientError: - LOG.debug('event source %s does not exist', self.arn) - response = None + uuid = self._get_uuid(function) + if uuid: + try: + response = self._lambda.get_event_source_mapping( + UUID=self._get_uuid(function)) + LOG.debug(response) + except ClientError: + LOG.debug('event source %s does not exist', self.arn) + response = None + else: + LOG.debug('No UUID for event source %s', self.arn) return response +class DynamoDBStreamEventSource(KinesisEventSource): + + pass + 
+ class S3EventSource(EventSource): def __init__(self, context, config): diff --git a/kappa/function.py b/kappa/function.py index 3c409d5..a3e6929 100644 --- a/kappa/function.py +++ b/kappa/function.py @@ -148,6 +148,7 @@ def create(self): self.zip_lambda_function(self.zipfile_name, self.path) with open(self.zipfile_name, 'rb') as fp: exec_role = self._context.exec_role_arn + LOG.debug('exec_role=%s', exec_role) try: zipdata = fp.read() response = self._lambda_svc.create_function( @@ -164,10 +165,27 @@ def create(self): LOG.exception('Unable to upload zip file') self.add_permissions() + def update(self): + LOG.debug('updating %s', self.zipfile_name) + self.zip_lambda_function(self.zipfile_name, self.path) + with open(self.zipfile_name, 'rb') as fp: + try: + zipdata = fp.read() + response = self._lambda_svc.update_function_code( + FunctionName=self.name, + ZipFile=zipdata) + LOG.debug(response) + except Exception: + LOG.exception('Unable to update zip file') + def delete(self): LOG.debug('deleting function %s', self.name) - response = self._lambda_svc.delete_function(FunctionName=self.name) - LOG.debug(response) + response = None + try: + response = self._lambda_svc.delete_function(FunctionName=self.name) + LOG.debug(response) + except ClientError: + LOG.debug('function %s: not found', self.name) return response def status(self): diff --git a/kappa/policy.py b/kappa/policy.py index 76b38b3..f0a3300 100644 --- a/kappa/policy.py +++ b/kappa/policy.py @@ -77,9 +77,11 @@ def create(self): LOG.exception('Error creating Policy') def delete(self): - LOG.debug('deleting policy %s', self.name) - response = self._iam_svc.delete_policy(PolicyArn=self.arn) - LOG.debug(response) + response = None + if self.arn: + LOG.debug('deleting policy %s', self.name) + response = self._iam_svc.delete_policy(PolicyArn=self.arn) + LOG.debug(response) return response def status(self): diff --git a/kappa/role.py b/kappa/role.py index f700fd5..1608f12 100644 --- a/kappa/role.py +++ 
b/kappa/role.py @@ -79,13 +79,28 @@ def create(self): Path=self.Path, RoleName=self.name, AssumeRolePolicyDocument=AssumeRolePolicyDocument) LOG.debug(response) - except Exception: + if self._context.policy: + response = self._iam_svc.attach_role_policy( + RoleName=self.name, + PolicyArn=self._context.policy.arn) + LOG.debug(response) + except ClientError: LOG.exception('Error creating Role') def delete(self): + response = None LOG.debug('deleting role %s', self.name) - response = self._iam_svc.delete_role(RoleName=self.name) - LOG.debug(response) + try: + LOG.debug('First detach the policy from the role') + policy_arn = self._context.policy.arn + if policy_arn: + response = self._iam_svc.detach_role_policy( + RoleName=self.name, PolicyArn=policy_arn) + LOG.debug(response) + response = self._iam_svc.delete_role(RoleName=self.name) + LOG.debug(response) + except ClientError: + LOG.exception('role %s not found', self.name) return response def status(self): From 5fbe8cfff544ac91ecdd25147f094abe982dfd54 Mon Sep 17 00:00:00 2001 From: Mitch Garnaat Date: Sun, 26 Apr 2015 20:08:46 -0700 Subject: [PATCH 3/7] More WIP changes to get current with GA release of Lambda. 
--- bin/kappa | 26 ++++++++++++++++++++++++++ kappa/context.py | 10 ++++++++++ kappa/event_source.py | 23 +++++++++++++++++++++-- kappa/function.py | 12 +++++++++++- 4 files changed, 68 insertions(+), 3 deletions(-) diff --git a/bin/kappa b/bin/kappa index e8037b0..7c7cddc 100755 --- a/bin/kappa +++ b/bin/kappa @@ -63,6 +63,24 @@ def invoke(ctx): click.echo(log_data) click.echo('...done') +@cli.command() +@click.pass_context +def dryrun(ctx): + context = Context(ctx.obj['config'], ctx.obj['debug']) + click.echo('invoking dryrun...') + response = context.dryrun() + click.echo(response) + click.echo('...done') + +@cli.command() +@click.pass_context +def invoke_async(ctx): + context = Context(ctx.obj['config'], ctx.obj['debug']) + click.echo('invoking async...') + response = context.invoke_async() + click.echo(response) + click.echo('...done') + @cli.command() @click.pass_context def tail(ctx): @@ -124,6 +142,14 @@ def add_event_sources(ctx): context.add_event_sources() click.echo('...done') +@cli.command() +@click.pass_context +def update_event_sources(ctx): + context = Context(ctx.obj['config'], ctx.obj['debug']) + click.echo('updating event sources...') + context.update_event_sources() + click.echo('...done') + if __name__ == '__main__': cli(obj={}) diff --git a/kappa/context.py b/kappa/context.py index 45df69a..9a0985a 100644 --- a/kappa/context.py +++ b/kappa/context.py @@ -122,6 +122,10 @@ def add_event_sources(self): for event_source in self.event_sources: event_source.add(self.function) + def update_event_sources(self): + for event_source in self.event_sources: + event_source.update(self.function) + def create(self): if self.policy: self.policy.create() @@ -140,6 +144,12 @@ def update_code(self): def invoke(self): return self.function.invoke() + def dryrun(self): + return self.function.dryrun() + + def invoke_async(self): + return self.function.invoke_async() + def tail(self): return self.function.tail() diff --git a/kappa/event_source.py 
b/kappa/event_source.py index 207de81..45620f6 100644 --- a/kappa/event_source.py +++ b/kappa/event_source.py @@ -38,6 +38,10 @@ def starting_position(self): def batch_size(self): return self._config.get('batch_size', 100) + @property + def enabled(self): + return self._config.get('enabled', True) + class KinesisEventSource(EventSource): @@ -62,10 +66,25 @@ def add(self, function): FunctionName=function.name, EventSourceArn=self.arn, BatchSize=self.batch_size, - StartingPosition=self.starting_position) + StartingPosition=self.starting_position, + Enabled=self.enabled + ) LOG.debug(response) except Exception: - LOG.exception('Unable to add Kinesis event source') + LOG.exception('Unable to add event source') + + def update(self, function): + response = None + uuid = self._get_uuid(function) + if uuid: + try: + response = self._lambda.update_event_source_mapping( + BatchSize=self.batch_size, + Enabled=self.enabled, + FunctionName=function.arn) + LOG.debug(response) + except Exception: + LOG.exception('Unable to update event source') def remove(self, function): response = None diff --git a/kappa/function.py b/kappa/function.py index a3e6929..cbe4be4 100644 --- a/kappa/function.py +++ b/kappa/function.py @@ -207,14 +207,24 @@ def invoke_asynch(self, data_file): InvokeArgs=fp) LOG.debug(response) - def invoke(self, test_data=None): + def _invoke(self, test_data, invocation_type): if test_data is None: test_data = self.test_data LOG.debug('invoke %s', test_data) with open(test_data) as fp: response = self._lambda_svc.invoke( FunctionName=self.name, + InvocationType=invocation_type, LogType='Tail', Payload=fp.read()) LOG.debug(response) return response + + def invoke(self, test_data=None): + return self._invoke(test_data, 'RequestResponse') + + def invoke_async(self, test_data=None): + return self._invoke(test_data, 'Event') + + def dryrun(self, test_data=None): + return self._invoke(test_data, 'DryRun') From 0848c084ff358c906ffb54da04cbc7a86de71d5c Mon Sep 17 00:00:00 
2001 From: Mitch Garnaat Date: Mon, 27 Apr 2015 06:11:29 -0700 Subject: [PATCH 4/7] Add debug logging about attaching policy to role. --- kappa/role.py | 1 + 1 file changed, 1 insertion(+) diff --git a/kappa/role.py b/kappa/role.py index 1608f12..2215cd9 100644 --- a/kappa/role.py +++ b/kappa/role.py @@ -80,6 +80,7 @@ def create(self): AssumeRolePolicyDocument=AssumeRolePolicyDocument) LOG.debug(response) if self._context.policy: + LOG.debug('attaching policy %s', self._context.policy.arn) response = self._iam_svc.attach_role_policy( RoleName=self.name, PolicyArn=self._context.policy.arn) From e1ef0a127506e93b7ce44e522b7eea8497c22ab3 Mon Sep 17 00:00:00 2001 From: Mitch Garnaat Date: Mon, 27 Apr 2015 07:49:09 -0700 Subject: [PATCH 5/7] Updated README.md file based on new commands. --- README.md | 49 +++++++++++++++++++++++++++++++------------------ 1 file changed, 31 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index 8e187a7..8825b1e 100644 --- a/README.md +++ b/README.md @@ -12,37 +12,52 @@ There are quite a few steps involved in developing a Lambda function. You have to: * Write the function itself (Javascript only for now) -* Create the IAM roles required by the Lambda function itself (the executing -role) as well as the policy required by whoever is invoking the Lambda -function (the invocation role) +* Create the IAM role required by the Lambda function itself (the executing +role) to allow it access to any resources it needs to do its job +* Add additional permissions to the Lambda function if it is going to be used +in a Push model (e.g. S3, SNS) rather than a Pull model. * Zip the function and any dependencies and upload it to AWS Lambda * Test the function with mock data * Retrieve the output of the function from CloudWatch Logs * Add an event source to the function * View the output of the live function -Kappa tries to help you with some of this. 
The IAM roles are created -in a CloudFormation template and kappa takes care of creating, updating, and -deleting the CloudFormation stack. Kappa will also zip up the function and +Kappa tries to help you with some of this. It allows you to create an IAM +managed policy or use an existing one. It creates the IAM execution role for +you and associates the policy with it. Kappa will zip up the function and any dependencies and upload them to AWS Lambda. It also sends test data to the uploaded function and finds the related CloudWatch log stream and displays the log events. Finally, it will add the event source to turn your function on. +If you need to make changes, kappa will allow you to easily update your Lambda +function with new code or update your event sources as needed. + +Getting Started +--------------- + Kappa is a command line tool. The basic command format is: kappa [optional command args] Where ``command`` is one of: -* deploy - deploy the CloudFormation template containing the IAM roles and zip - the function and upload it to AWS Lambda -* test - send test data to the new Lambda function +* create - creates the IAM policy (if necessary), the IAM role, and zips and + uploads the Lambda function code to the Lambda service +* invoke - make a synchronous call to your Lambda function, passing test data + and display the resulting log data +* invoke_async - make an asynchronous call to your Lambda function passing test + data. +* dryrun - make the call but only check things like permissions and report + back. Don't actually run the code. 
* tail - display the most recent log events for the function (remember that it can take several minutes before log events are available from CloudWatch) * add-event-sources - hook up an event source to your Lambda function -* delete - delete the CloudFormation stack containing the IAM roles and delete - the Lambda function +* delete - delete the Lambda function, remove any event sources, delete the IAM + policy and role +* update_code - Upload new code for your Lambda function +* update_event_sources - Update the event sources based on the information in + your kappa config file * status - display summary information about functions, stacks, and event sources related to your project. @@ -58,14 +73,12 @@ An example project based on a Kinesis stream can be found in The basic workflow is: * Create your Lambda function -* Create your CloudFormation template with the execution and invocation roles +* Create any custom IAM policy you need to execute your Lambda function * Create some sample data * Create the YAML config file with all of the information -* Run ``kappa deploy`` to create roles and upload function -* Run ``kappa test`` to invoke the function with test data -* Run ``kappa tail`` to view the functions output in CloudWatch logs +* Run ``kappa create`` to create roles and upload function +* Run ``kappa invoke`` to invoke the function with test data +* Run ``kappa update_code`` to upload new code for your Lambda + function * Run ``kappa add-event-source`` to hook your function up to the event source * Run ``kappa tail`` to see more output - -If you have to make changes in your function or in your IAM roles, simply run -``kappa deploy`` again and the changes will be uploaded as necessary. 
From 4c802ad7d1e0b8dcdf0db4896e983f2ededcdd76 Mon Sep 17 00:00:00 2001 From: Mitch Garnaat Date: Mon, 27 Apr 2015 16:56:52 -0700 Subject: [PATCH 6/7] Rewriting some tests and also rewriting the MockAWS module to automatically map all responses in responses.py to mocks in the client. --- kappa/policy.py | 2 - tests/unit/data/BazPolicy.json | 12 ++++ tests/unit/data/roles.cf | 0 tests/unit/mock_aws.py | 45 +++++---------- tests/unit/responses.py | 58 ++++++++++++++++---- tests/unit/{test_stack.py => test_policy.py} | 52 ++++++++++-------- tests/unit/test_role.py | 58 ++++++++++++++++++++ 7 files changed, 161 insertions(+), 66 deletions(-) create mode 100644 tests/unit/data/BazPolicy.json delete mode 100644 tests/unit/data/roles.cf rename tests/unit/{test_stack.py => test_policy.py} (51%) create mode 100644 tests/unit/test_role.py diff --git a/kappa/policy.py b/kappa/policy.py index f0a3300..69856e5 100644 --- a/kappa/policy.py +++ b/kappa/policy.py @@ -13,8 +13,6 @@ import logging -from botocore.exceptions import ClientError - import kappa.aws LOG = logging.getLogger(__name__) diff --git a/tests/unit/data/BazPolicy.json b/tests/unit/data/BazPolicy.json new file mode 100644 index 0000000..cbaf0cd --- /dev/null +++ b/tests/unit/data/BazPolicy.json @@ -0,0 +1,12 @@ +{ + "Statement":[ + {"Condition": + {"ArnLike":{"AWS:SourceArn":"arn:aws:sns:us-east-1:123456789012:lambda_topic"}}, + "Resource":"arn:aws:lambda:us-east-1:123456789023:function:messageStore", + "Action":"lambda:invokeFunction", + "Principal":{"Service":"sns.amazonaws.com"}, + "Sid":"sns invoke","Effect":"Allow" + }], + "Id":"default", + "Version":"2012-10-17" +} diff --git a/tests/unit/data/roles.cf b/tests/unit/data/roles.cf deleted file mode 100644 index e69de29..0000000 diff --git a/tests/unit/mock_aws.py b/tests/unit/mock_aws.py index aa6369f..b0da5ab 100644 --- a/tests/unit/mock_aws.py +++ b/tests/unit/mock_aws.py @@ -1,3 +1,5 @@ +import inspect + import mock import tests.unit.responses as responses @@ 
-6,40 +8,23 @@ class MockAWS(object): def __init__(self, profile=None, region=None): - pass + self.response_map = {} + for name, value in inspect.getmembers(responses): + if name.startswith('__'): + continue + if '_' in name: + service_name, request_name = name.split('_', 1) + if service_name not in self.response_map: + self.response_map[service_name] = {} + self.response_map[service_name][request_name] = value def create_client(self, client_name): client = None - if client_name == 'logs': - client = mock.Mock() - choices = responses.logs_describe_log_groups - client.describe_log_groups = mock.Mock( - side_effect=choices) - choices = responses.logs_describe_log_streams - client.describe_log_streams = mock.Mock( - side_effect=choices) - choices = responses.logs_get_log_events - client.get_log_events = mock.Mock( - side_effect=choices) - if client_name == 'cloudformation': - client = mock.Mock() - choices = responses.cfn_list_stack_resources - client.list_stack_resources = mock.Mock( - side_effect=choices) - choices = responses.cfn_describe_stacks - client.describe_stacks = mock.Mock( - side_effect=choices) - choices = responses.cfn_create_stack - client.create_stack = mock.Mock( - side_effect=choices) - choices = responses.cfn_delete_stack - client.delete_stack = mock.Mock( - side_effect=choices) - if client_name == 'iam': + if client_name in self.response_map: client = mock.Mock() - choices = responses.iam_get_role - client.get_role = mock.Mock( - side_effect=choices) + for request in self.response_map[client_name]: + response = self.response_map[client_name][request] + setattr(client, request, mock.Mock(side_effect=response)) return client diff --git a/tests/unit/responses.py b/tests/unit/responses.py index 9b2f1fd..39118a6 100644 --- a/tests/unit/responses.py +++ b/tests/unit/responses.py @@ -1,10 +1,56 @@ import datetime from dateutil.tz import tzutc -cfn_list_stack_resources = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 
'dd35f0ef-9699-11e4-ba38-c355c9515dbc'}, u'StackResourceSummaries': [{u'ResourceStatus': 'CREATE_COMPLETE', u'ResourceType': 'AWS::IAM::Role', u'ResourceStatusReason': None, u'LastUpdatedTimestamp': datetime.datetime(2015, 1, 6, 17, 37, 54, 861000, tzinfo=tzutc()), u'PhysicalResourceId': 'TestKinesis-InvokeRole-IF6VUXY9MBJN', u'LogicalResourceId': 'InvokeRole'}, {u'ResourceStatus': 'CREATE_COMPLETE', u'ResourceType': 'AWS::IAM::Role', u'ResourceStatusReason': None, u'LastUpdatedTimestamp': datetime.datetime(2015, 1, 6, 17, 37, 55, 18000, tzinfo=tzutc()), u'PhysicalResourceId': 'TestKinesis-ExecRole-567SAV6TZOET', u'LogicalResourceId': 'ExecRole'}, {u'ResourceStatus': 'CREATE_COMPLETE', u'ResourceType': 'AWS::IAM::Policy', u'ResourceStatusReason': None, u'LastUpdatedTimestamp': datetime.datetime(2015, 1, 6, 17, 37, 58, 120000, tzinfo=tzutc()), u'PhysicalResourceId': 'TestK-Invo-OMW5SDLQM8FM', u'LogicalResourceId': 'InvokeRolePolicies'}, {u'ResourceStatus': 'CREATE_COMPLETE', u'ResourceType': 'AWS::IAM::Policy', u'ResourceStatusReason': None, u'LastUpdatedTimestamp': datetime.datetime(2015, 1, 6, 17, 37, 58, 454000, tzinfo=tzutc()), u'PhysicalResourceId': 'TestK-Exec-APWRVKTBPPPT', u'LogicalResourceId': 'ExecRolePolicies'}]}] +iam_list_policies = [{u'IsTruncated': True, + u'Marker': 'ABcyoYmSlphARcitCJruhVIxKW3Hg1LJD3Fm4LAW8iGKykrSNrApiUoz2rjIuNiLJpT6JtUgP5M7wTuPZcHu1KsvMarvgFBFQObTPSa4WF22Zg==', + u'Policies': [{u'Arn': 'arn:aws:iam::123456789012:policy/FooPolicy', + u'AttachmentCount': 0, + u'CreateDate': datetime.datetime(2015, 2, 24, 3, 16, 24, tzinfo=tzutc()), + u'DefaultVersionId': 'v2', + u'IsAttachable': True, + u'Path': '/', + u'PolicyId': 'ANPAJHWE6R7YT7PLAH3KG', + u'PolicyName': 'FooPolicy', + u'UpdateDate': datetime.datetime(2015, 2, 25, 0, 19, 12, tzinfo=tzutc())}, + {u'Arn': 'arn:aws:iam::123456789012:policy/BarPolicy', + u'AttachmentCount': 1, + u'CreateDate': datetime.datetime(2015, 2, 25, 0, 11, 57, tzinfo=tzutc()), + u'DefaultVersionId': 'v2', + 
u'IsAttachable': True, + u'Path': '/', + u'PolicyId': 'ANPAJU7MVBQXOQTVQN3VM', + u'PolicyName': 'BarPolicy', + u'UpdateDate': datetime.datetime(2015, 2, 25, 0, 13, 8, tzinfo=tzutc())}, + {u'Arn': 'arn:aws:iam::123456789012:policy/FiePolicy', + u'AttachmentCount': 1, + u'CreateDate': datetime.datetime(2015, 3, 21, 19, 18, 21, tzinfo=tzutc()), + u'DefaultVersionId': 'v4', + u'IsAttachable': True, + u'Path': '/', + u'PolicyId': 'ANPAIXQ72B2OH2RZPYQ4Y', + u'PolicyName': 'FiePolicy', + u'UpdateDate': datetime.datetime(2015, 3, 26, 23, 26, 52, tzinfo=tzutc())}], +'ResponseMetadata': {'HTTPStatusCode': 200, + 'RequestId': '4e87c995-ecf2-11e4-bb10-51f1499b3162'}}] + +iam_create_policy = [{u'Policy': {u'PolicyName': 'LambdaChatDynamoDBPolicy', u'CreateDate': datetime.datetime(2015, 4, 27, 12, 13, 35, 240000, tzinfo=tzutc()), u'AttachmentCount': 0, u'IsAttachable': True, u'PolicyId': 'ANPAISQNU4EPZZDVZUOKU', u'DefaultVersionId': 'v1', u'Path': '/kappa/', u'Arn': 'arn:aws:iam::658794617753:policy/kappa/LambdaChatDynamoDBPolicy', u'UpdateDate': datetime.datetime(2015, 4, 27, 12, 13, 35, 240000, tzinfo=tzutc())}, 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'd403e95f-ecd6-11e4-9ee0-15e8b71db930'}}] + +iam_list_roles = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'd41415ff-ecd6-11e4-bb10-51f1499b3162'}, u'IsTruncated': False, u'Roles': [{u'AssumeRolePolicyDocument': {u'Version': u'2012-10-17', u'Statement': [{u'Action': u'sts:AssumeRole', u'Principal': {u'Service': u'lambda.amazonaws.com'}, u'Effect': u'Allow', u'Sid': u''}]}, u'RoleId': 'AROAJ4JSNL3M4UYI6GDYS', u'CreateDate': datetime.datetime(2015, 4, 27, 11, 59, 19, tzinfo=tzutc()), u'RoleName': 'FooRole', u'Path': '/kappa/', u'Arn': 'arn:aws:iam::123456789012:role/kappa/FooRole'}]}] + +iam_create_role = [{u'Role': {u'AssumeRolePolicyDocument': {u'Version': u'2012-10-17', u'Statement': [{u'Action': [u'sts:AssumeRole'], u'Effect': u'Allow', u'Principal': {u'Service': [u'lambda.amazonaws.com']}}]}, 
u'RoleId': 'AROAIT2ZRRPQBOIBBHPZU', u'CreateDate': datetime.datetime(2015, 4, 27, 12, 13, 35, 426000, tzinfo=tzutc()), u'RoleName': 'BazRole', u'Path': '/kappa/', u'Arn': 'arn:aws:iam::123456789012:role/kappa/BazRole'}, 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'd41fd55c-ecd6-11e4-9fd8-03ee0021e940'}}] iam_get_role = [{u'Role': {u'AssumeRolePolicyDocument': {u'Version': u'2012-10-17', u'Statement': [{u'Action': u'sts:AssumeRole', u'Principal': {u'Service': u's3.amazonaws.com'}, u'Effect': u'Allow', u'Condition': {u'ArnLike': {u'sts:ExternalId': u'arn:aws:s3:::*'}}, u'Sid': u''}, {u'Action': u'sts:AssumeRole', u'Principal': {u'Service': u'lambda.amazonaws.com'}, u'Effect': u'Allow', u'Sid': u''}]}, u'RoleId': 'AROAIEVJHUJG2I4MG5PSC', u'CreateDate': datetime.datetime(2015, 1, 6, 17, 37, 44, tzinfo=tzutc()), u'RoleName': 'TestKinesis-InvokeRole-IF6VUXY9MBJN', u'Path': '/', u'Arn': 'arn:aws:iam::0123456789012:role/TestKinesis-InvokeRole-FOO'}, 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'dd6e8d42-9699-11e4-afe6-d3625e8b365b'}}] +iam_attach_role_policy = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'd43e32dc-ecd6-11e4-9fd8-03ee0021e940'}}] + +iam_detach_role_policy = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'a7d30b51-ecd6-11e4-bbe4-d996b8ad5d9e'}}] + +iam_delete_role = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'a7e5a97e-ecd6-11e4-ae9e-6dee7bf37e66'}}] + +lambda_create_function = [{u'FunctionName': u'LambdaChatDynamoDB', 'ResponseMetadata': {'HTTPStatusCode': 201, 'RequestId': 'd7840efb-ecd6-11e4-b8b0-f7f3177894e9'}, u'CodeSize': 22024, u'MemorySize': 128, u'FunctionArn': u'arn:aws:lambda:us-east-1:123456789012:function:FooBarFunction', u'Handler': u'FooBarFunction.handler', u'Role': u'arn:aws:iam::123456789012:role/kappa/BazRole', u'Timeout': 3, u'LastModified': u'2015-04-27T12:13:41.147+0000', u'Runtime': u'nodejs', u'Description': u'A FooBar function'}] + +lambda_delete_function = 
[{'ResponseMetadata': {'HTTPStatusCode': 204, 'RequestId': 'a499b2c2-ecd6-11e4-8d2a-77b7e55836e7'}}] + logs_describe_log_groups = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'da962431-afed-11e4-8c17-1776597471e6'}, u'logGroups': [{u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample*', @@ -23,13 +69,3 @@ logs_describe_log_streams = [{u'logStreams': [{u'firstEventTimestamp': 1417042749449, u'lastEventTimestamp': 1417042749547, u'creationTime': 1417042748263, u'uploadSequenceToken': u'49540114640150833041490484409222729829873988799393975922', u'logStreamName': u'1cc48e4e613246b7974094323259d600', u'lastIngestionTime': 1417042750483, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:1cc48e4e613246b7974094323259d600', u'storedBytes': 712}, {u'firstEventTimestamp': 1417272406988, u'lastEventTimestamp': 1417272407088, u'creationTime': 1417272405690, u'uploadSequenceToken': u'49540113907504451034164105858363493278561872472363261986', u'logStreamName': u'2782a5ff88824c85a9639480d1ed7bbe', u'lastIngestionTime': 1417272408043, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:2782a5ff88824c85a9639480d1ed7bbe', u'storedBytes': 712}, {u'firstEventTimestamp': 1420569035842, u'lastEventTimestamp': 1420569035941, u'creationTime': 1420569034614, u'uploadSequenceToken': u'49540113907883563702539166025438885323514410026454245426', u'logStreamName': u'2d62991a479b4ebf9486176122b72a55', u'lastIngestionTime': 1420569036909, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:2d62991a479b4ebf9486176122b72a55', u'storedBytes': 709}, {u'firstEventTimestamp': 1418244027421, u'lastEventTimestamp': 1418244027541, u'creationTime': 1418244026907, u'uploadSequenceToken': u'49540113964795065449189116778452984186276757901477438642', u'logStreamName': u'4f44ffa128d6405591ca83b2b0f9dd2d', u'lastIngestionTime': 1418244028484, 
u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:4f44ffa128d6405591ca83b2b0f9dd2d', u'storedBytes': 1010}, {u'firstEventTimestamp': 1418242565524, u'lastEventTimestamp': 1418242565641, u'creationTime': 1418242564196, u'uploadSequenceToken': u'49540113095132904942090446312687285178819573422397343074', u'logStreamName': u'69c5ac87e7e6415985116e8cb44e538e', u'lastIngestionTime': 1418242566558, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:69c5ac87e7e6415985116e8cb44e538e', u'storedBytes': 713}, {u'firstEventTimestamp': 1417213193378, u'lastEventTimestamp': 1417213193478, u'creationTime': 1417213192095, u'uploadSequenceToken': u'49540113336360065754596187770479764234792559857643841394', u'logStreamName': u'f68e3d87b8a14cdba338f6926f7cf50a', u'lastIngestionTime': 1417213194421, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:f68e3d87b8a14cdba338f6926f7cf50a', u'storedBytes': 711}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '2a6d4941-969b-11e4-947f-19d1c72ede7e'}}] logs_get_log_events = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '2a7deb71-969b-11e4-914b-8f1f3d7b023d'}, u'nextForwardToken': u'f/31679748107442531967654742688057700554200447759088287749', u'events': [{u'ingestionTime': 1420569036909, u'timestamp': 1420569035842, u'message': u'2015-01-06T18:30:35.841Z\tko2sss03iq7l2pdk\tLoading event\n'}, {u'ingestionTime': 1420569036909, u'timestamp': 1420569035899, u'message': u'START RequestId: 23007242-95d2-11e4-a10e-7b2ab60a7770\n'}, {u'ingestionTime': 1420569036909, u'timestamp': 1420569035940, u'message': u'2015-01-06T18:30:35.940Z\t23007242-95d2-11e4-a10e-7b2ab60a7770\t{\n "Records": [\n {\n "kinesis": {\n "partitionKey": "partitionKey-3",\n "kinesisSchemaVersion": "1.0",\n "data": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0IDEyMy4=",\n "sequenceNumber": "49545115243490985018280067714973144582180062593244200961"\n 
},\n "eventSource": "aws:kinesis",\n "eventID": "shardId-000000000000:49545115243490985018280067714973144582180062593244200961",\n "invokeIdentityArn": "arn:aws:iam::0123456789012:role/testLEBRole",\n "eventVersion": "1.0",\n "eventName": "aws:kinesis:record",\n "eventSourceARN": "arn:aws:kinesis:us-east-1:35667example:stream/examplestream",\n "awsRegion": "us-east-1"\n }\n ]\n}\n'}, {u'ingestionTime': 1420569036909, u'timestamp': 1420569035940, u'message': u'2015-01-06T18:30:35.940Z\t23007242-95d2-11e4-a10e-7b2ab60a7770\tDecoded payload: Hello, this is a test 123.\n'}, {u'ingestionTime': 1420569036909, u'timestamp': 1420569035941, u'message': u'END RequestId: 23007242-95d2-11e4-a10e-7b2ab60a7770\n'}, {u'ingestionTime': 1420569036909, u'timestamp': 1420569035941, u'message': u'REPORT RequestId: 23007242-95d2-11e4-a10e-7b2ab60a7770\tDuration: 98.51 ms\tBilled Duration: 100 ms \tMemory Size: 128 MB\tMax Memory Used: 26 MB\t\n'}], u'nextBackwardToken': u'b/31679748105234758193000210997045664445208259969996226560'}] - -cfn_describe_stacks = [ - {u'Stacks': [{u'StackId': 'arn:aws:cloudformation:us-east-1:084307701560:stack/TestKinesis/7c4ae730-96b8-11e4-94cc-5001dc3ed8d2', u'Description': None, u'Tags': [], u'StackStatusReason': 'User Initiated', u'CreationTime': datetime.datetime(2015, 1, 7, 21, 59, 43, 208000, tzinfo=tzutc()), u'Capabilities': ['CAPABILITY_IAM'], u'StackName': 'TestKinesis', u'NotificationARNs': [], u'StackStatus': 'CREATE_IN_PROGRESS', u'DisableRollback': False}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '7d66debd-96b8-11e4-a647-4f4741ffff69'}}, - {u'Stacks': [{u'StackId': 'arn:aws:cloudformation:us-east-1:084307701560:stack/TestKinesis/7c4ae730-96b8-11e4-94cc-5001dc3ed8d2', u'Description': None, u'Tags': [], u'StackStatusReason': 'User Initiated', u'CreationTime': datetime.datetime(2015, 1, 7, 21, 59, 43, 208000, tzinfo=tzutc()), u'Capabilities': ['CAPABILITY_IAM'], u'StackName': 'TestKinesis', u'NotificationARNs': [], 
u'StackStatus': 'CREATE_IN_PROGRESS', u'DisableRollback': False}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '7e36fff7-96b8-11e4-af44-6350f4f8c2ae'}}, - {u'Stacks': [{u'StackId': 'arn:aws:cloudformation:us-east-1:084307701560:stack/TestKinesis/7c4ae730-96b8-11e4-94cc-5001dc3ed8d2', u'Description': None, u'Tags': [], u'StackStatusReason': 'User Initiated', u'CreationTime': datetime.datetime(2015, 1, 7, 21, 59, 43, 208000, tzinfo=tzutc()), u'Capabilities': ['CAPABILITY_IAM'], u'StackName': 'TestKinesis', u'NotificationARNs': [], u'StackStatus': 'CREATE_IN_PROGRESS', u'DisableRollback': False}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '7ef03e10-96b8-11e4-bc86-7f67e11abcfa'}}, - {u'Stacks': [{u'StackId': 'arn:aws:cloudformation:us-east-1:084307701560:stack/TestKinesis/7c4ae730-96b8-11e4-94cc-5001dc3ed8d2', u'Description': None, u'Tags': [], u'StackStatusReason': None, u'CreationTime': datetime.datetime(2015, 1, 7, 21, 59, 43, 208000, tzinfo=tzutc()), u'Capabilities': ['CAPABILITY_IAM'], u'StackName': 'TestKinesis', u'NotificationARNs': [], u'StackStatus': 'CREATE_COMPLETE', u'DisableRollback': False}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '8c2bff8e-96b8-11e4-be70-c5ad82c32f2d'}}] - -cfn_create_stack = [{u'StackId': 'arn:aws:cloudformation:us-east-1:084307701560:stack/TestKinesis/7c4ae730-96b8-11e4-94cc-5001dc3ed8d2', 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '7c2f2260-96b8-11e4-be70-c5ad82c32f2d'}}] - -cfn_delete_stack = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'f19af5b8-96bc-11e4-860e-11ba752b58a9'}}] diff --git a/tests/unit/test_stack.py b/tests/unit/test_policy.py similarity index 51% rename from tests/unit/test_stack.py rename to tests/unit/test_policy.py index 9b038c4..f2c93dc 100644 --- a/tests/unit/test_stack.py +++ b/tests/unit/test_policy.py @@ -16,52 +16,58 @@ import mock -from kappa.stack import Stack +from kappa.policy import Policy from tests.unit.mock_aws import get_aws 
-Config = { - 'template': 'roles.cf', - 'stack_name': 'FooBar', - 'exec_role': 'ExecRole', - 'invoke_role': 'InvokeRole'} +Config1 = { + 'name': 'FooPolicy', + 'description': 'This is the Foo policy', + 'document': 'FooPolicy.json'} + +Config2 = { + 'name': 'BazPolicy', + 'description': 'This is the Baz policy', + 'document': 'BazPolicy.json'} def path(filename): return os.path.join(os.path.dirname(__file__), 'data', filename) -class TestStack(unittest.TestCase): +class TestPolicy(unittest.TestCase): def setUp(self): self.aws_patch = mock.patch('kappa.aws.get_aws', get_aws) self.mock_aws = self.aws_patch.start() - Config['template'] = path(Config['template']) + Config1['document'] = path(Config1['document']) + Config2['document'] = path(Config2['document']) def tearDown(self): self.aws_patch.stop() def test_properties(self): mock_context = mock.Mock() - stack = Stack(mock_context, Config) - self.assertEqual(stack.name, Config['stack_name']) - self.assertEqual(stack.template_path, Config['template']) - self.assertEqual(stack.exec_role, Config['exec_role']) - self.assertEqual(stack.invoke_role, Config['invoke_role']) - self.assertEqual( - stack.invoke_role_arn, - 'arn:aws:iam::0123456789012:role/TestKinesis-InvokeRole-FOO') + policy = Policy(mock_context, Config1) + self.assertEqual(policy.name, Config1['name']) + self.assertEqual(policy.document, Config1['document']) + self.assertEqual(policy.description, Config1['description']) def test_exists(self): mock_context = mock.Mock() - stack = Stack(mock_context, Config) - self.assertTrue(stack.exists()) + policy = Policy(mock_context, Config1) + self.assertTrue(policy.exists()) + + def test_not_exists(self): + mock_context = mock.Mock() + policy = Policy(mock_context, Config2) + self.assertFalse(policy.exists()) - def test_update(self): + def test_create(self): mock_context = mock.Mock() - stack = Stack(mock_context, Config) - stack.update() + policy = Policy(mock_context, Config2) + policy.create() def 
test_delete(self): mock_context = mock.Mock() - stack = Stack(mock_context, Config) - stack.delete() + policy = Policy(mock_context, Config1) + policy.delete() diff --git a/tests/unit/test_role.py b/tests/unit/test_role.py new file mode 100644 index 0000000..2d731d5 --- /dev/null +++ b/tests/unit/test_role.py @@ -0,0 +1,58 @@ +# Copyright (c) 2015 Mitch Garnaat http://garnaat.org/ +# +# Licensed under the Apache License, Version 2.0 (the "License"). You +# may not use this file except in compliance with the License. A copy of +# the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF +# ANY KIND, either express or implied. See the License for the specific +# language governing permissions and limitations under the License. + +import unittest + +import mock + +from kappa.role import Role +from tests.unit.mock_aws import get_aws + +Config1 = {'name': 'FooRole'} + +Config2 = {'name': 'BazRole'} + + +class TestRole(unittest.TestCase): + + def setUp(self): + self.aws_patch = mock.patch('kappa.aws.get_aws', get_aws) + self.mock_aws = self.aws_patch.start() + + def tearDown(self): + self.aws_patch.stop() + + def test_properties(self): + mock_context = mock.Mock() + role = Role(mock_context, Config1) + self.assertEqual(role.name, Config1['name']) + + def test_exists(self): + mock_context = mock.Mock() + role = Role(mock_context, Config1) + self.assertTrue(role.exists()) + + def test_not_exists(self): + mock_context = mock.Mock() + role = Role(mock_context, Config2) + self.assertFalse(role.exists()) + + def test_create(self): + mock_context = mock.Mock() + role = Role(mock_context, Config2) + role.create() + + def test_delete(self): + mock_context = mock.Mock() + role = Role(mock_context, Config1) + role.delete() From 904ea3736118faff26bd6228ce2a68ec54cb4193 Mon Sep 17 00:00:00 2001 From: Mitch Garnaat Date: Mon, 27 Apr 2015 
17:03:39 -0700 Subject: [PATCH 7/7] Updating boto3 dependency. --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 868c33d..17866c1 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -boto3==0.0.15 +boto3==0.0.16 click==4.0 PyYAML>=3.11 mock>=1.0.1 diff --git a/setup.py b/setup.py index e0f8028..38bee7a 100755 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ import os requires = [ - 'boto3==0.0.15', + 'boto3==0.0.16', 'click==4.0', 'PyYAML>=3.11' ]