Merge pull request newrelic#43 from newrelic/add_additional_attributes
Allow adding additional attributes
danybmx authored Aug 26, 2022
2 parents 2265011 + a6c9cdf commit 0e40e7f
Showing 3 changed files with 85 additions and 21 deletions.
3 changes: 2 additions & 1 deletion serverless.yml
@@ -46,7 +46,8 @@ functions:
S3_CLOUDTRAIL_LOG_PATTERN: ${env:S3_CLOUDTRAIL_LOG_PATTERN}
S3_IGNORE_PATTERN: ${env:S3_IGNORE_PATTERN}
BATCH_SIZE_FACTOR: ${env:BATCH_SIZE_FACTOR}
+ADDITIONAL_ATTRIBUTES: ${env:ADDITIONAL_ATTRIBUTES}

events:
- s3:
bucket: ${env:S3_BUCKET_NAME}
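For context, ADDITIONAL_ATTRIBUTES is expected to hold a JSON object serialized as a string; the new _get_additional_attributes helper in src/handler.py (below) parses it with json.loads and falls back to an empty dict when the variable is unset or empty. A minimal sketch of that behaviour, with purely illustrative attribute names:

import json
import os

# Illustrative only: any JSON object with string keys and string values works here.
os.environ["ADDITIONAL_ATTRIBUTES"] = '{"environment": "production", "team": "platform"}'

# The handler parses the variable with json.loads and falls back to {} when it is unset or empty.
additional_attributes = json.loads(os.getenv("ADDITIONAL_ATTRIBUTES", "{}") or "{}")
print(additional_attributes)  # {'environment': 'production', 'team': 'platform'}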
73 changes: 56 additions & 17 deletions src/handler.py
@@ -18,12 +18,49 @@

US_LOGGING_INGEST_HOST = "https://log-api.newrelic.com/log/v1"
EU_LOGGING_INGEST_HOST = 'https://log-api.eu.newrelic.com/log/v1'
-LOGGING_LAMBDA_VERSION = '1.1.4'
+LOGGING_LAMBDA_VERSION = '1.1.5'
LOGGING_PLUGIN_METADATA = {
'type': "s3-lambda",
'version': LOGGING_LAMBDA_VERSION
}


+class InvalidArgumentException(Exception):
+    pass
+
+
+def _format_error(e, text):
+    return "{}. {}".format(e, text)
+
+def _get_optional_env(key, default):
+    """
+    Returns the default value even if the environment variable is set but empty.
+    """
+    return os.getenv(key, default) or default
+
+def _get_additional_attributes(attributes=None):
+    """
+    Reads the environment variable 'ADDITIONAL_ATTRIBUTES' and parses it as a JSON object.
+    Defaults to an empty dict.
+    :param attributes: if provided, this value is returned as-is instead of reading the environment
+    :raises InvalidArgumentException: if 'ADDITIONAL_ATTRIBUTES' is not a valid JSON object, or
+        is not of type str, bytes or bytearray
+    :return: dict of attributes (key, value) to add to the payload
+    """
+    if attributes:
+        return attributes
+    env_attributes = _get_optional_env("ADDITIONAL_ATTRIBUTES", "{}")
+    try:
+        return json.loads(env_attributes)
+    except json.JSONDecodeError as e:
+        raise InvalidArgumentException(_format_error(e, "Invalid JSON object"))
+    except TypeError as e:
+        raise InvalidArgumentException(_format_error(e, "The type of the object should be one of "
+                                                        "the following: str, bytes or bytearray"))
+
+
+additional_attributes = _get_additional_attributes()
# Maximum number of retries
MAX_RETRIES = 5
# Initial backoff (in seconds) between retries
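A short sketch of how the new helpers behave, assuming handler.py and its dependencies are importable (the module path and environment values here are illustrative):

import os

from handler import InvalidArgumentException, _get_additional_attributes, _get_optional_env  # assumes src/ is on sys.path

# An empty env var still yields the default, because of the trailing `or default`.
os.environ["LOG_TYPE"] = ""
assert _get_optional_env("LOG_TYPE", "fallback") == "fallback"

# Valid JSON in ADDITIONAL_ATTRIBUTES becomes a dict that is merged into every payload.
os.environ["ADDITIONAL_ATTRIBUTES"] = '{"service": "checkout"}'
assert _get_additional_attributes() == {"service": "checkout"}

# Malformed JSON is surfaced as InvalidArgumentException rather than a raw JSONDecodeError.
os.environ["ADDITIONAL_ATTRIBUTES"] = "not-json"
try:
    _get_additional_attributes()
except InvalidArgumentException as e:
    print(e)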
@@ -56,7 +93,7 @@ def _is_ignore_log_file(key=None, regex_pattern=None):
    This function checks whether this log file should be ignored, based on a regex pattern.
    """
    if not regex_pattern:
-        regex_pattern = os.getenv("S3_IGNORE_PATTERN", "$^")
+        regex_pattern = _get_optional_env("S3_IGNORE_PATTERN", "$^")

    return bool(re.search(regex_pattern, key))

@@ -66,7 +103,7 @@ def _isCloudTrail(key=None, regex_pattern=None):
    This function checks whether this log file is a CloudTrail log, based on a regex pattern.
    """
    if not regex_pattern:
-        regex_pattern = os.getenv(
+        regex_pattern = _get_optional_env(
            "S3_CLOUDTRAIL_LOG_PATTERN", ".*CloudTrail.*\.json.gz$")

    return bool(re.search(regex_pattern, key))
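For reference, a sketch of how the default patterns shown above classify object keys; the keys themselves are made up:

import re

cloudtrail_default = r".*CloudTrail.*\.json.gz$"
ignore_default = "$^"  # matches nothing, so no file is ignored by default

keys = [
    "AWSLogs/123456789012/CloudTrail/us-east-1/2022/08/26/trail_file.json.gz",
    "app-logs/2022/08/26/api.log",
]
for key in keys:
    is_cloudtrail = bool(re.search(cloudtrail_default, key))
    is_ignored = bool(re.search(ignore_default, key))
    print(key, is_cloudtrail, is_ignored)
# The first key matches the CloudTrail pattern; neither key matches the ignore pattern.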
@@ -84,30 +121,30 @@ def _get_batch_size_factor(batch_size_factor=None):
"""
if batch_size_factor:
return batch_size_factor
return _convert_float(os.getenv("BATCH_SIZE_FACTOR", BATCH_SIZE_FACTOR))
return _convert_float(_get_optional_env("BATCH_SIZE_FACTOR", BATCH_SIZE_FACTOR))

def _get_license_key(license_key=None):
"""
This functions gets New Relic's license key from env vars.
"""
if license_key:
return license_key
return os.getenv("LICENSE_KEY", "")
return _get_optional_env("LICENSE_KEY", "")


def _get_log_type(log_type=None):
"""
This functions gets the New Relic logtype from env vars.
"""
return log_type or os.getenv("LOG_TYPE") or os.getenv("LOGTYPE", "")
return log_type or _get_optional_env("LOG_TYPE", "")


def _setting_console_logging_level():
"""
Determines whether or not debug logging should be enabled based on the env var.
Defaults to false.
"""
if os.getenv("DEBUG_ENABLED", "false").lower() == "true":
if _get_optional_env("DEBUG_ENABLED", "false").lower() == "true":
print("enabling debug mode")
logger.setLevel(logging.DEBUG)
else:
@@ -151,16 +188,19 @@ def _package_log_payload(data):

    for line in logLines:
        log_messages.append({'message': line})
+    attributes = {
+        "plugin": LOGGING_PLUGIN_METADATA,
+        "aws": {
+            "invoked_function_arn": data["context"]["invoked_function_arn"],
+            "s3_bucket_name": data["context"]["s3_bucket_name"],
+            "s3_key": data["context"]["s3_key"]},
+        "logtype": _get_log_type()
+    }
    packaged_payload = [
        {
            "common": {
-                "attributes": {
-                    "plugin": LOGGING_PLUGIN_METADATA,
-                    "aws": {
-                        "invoked_function_arn": data["context"]["invoked_function_arn"],
-                        "s3_bucket_name": data["context"]["s3_bucket_name"]},
-                    "logtype": _get_log_type()
-                }},
+                "attributes": {**attributes, **additional_attributes}
+            },
            "logs": log_messages,
        }]
    return packaged_payload
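A sketch of the merged payload this produces, using made-up values; because of the dict unpacking, keys coming from ADDITIONAL_ATTRIBUTES override the base attributes on collision:

base_attributes = {
    "plugin": {"type": "s3-lambda", "version": "1.1.5"},
    "aws": {
        "invoked_function_arn": "arn:aws:lambda:us-east-1:123456789012:function:example",
        "s3_bucket_name": "example-bucket",
        "s3_key": "logs/app.log",
    },
    "logtype": "nginx",
}
additional_attributes = {"team": "platform", "logtype": "custom"}

merged = {**base_attributes, **additional_attributes}
# additional_attributes wins on duplicate keys, so "logtype" becomes "custom".
payload = [{"common": {"attributes": merged}, "logs": [{"message": "hello"}]}]
print(payload[0]["common"]["attributes"]["logtype"])  # custom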
@@ -175,8 +215,6 @@ def create_request(payload, ingest_url=None, license_key=None):


async def send_log(session, url, data, headers):
-    def _format_error(e, text):
-        return "{}. {}".format(e, text)
    global completed_requests
    backoff = INITIAL_BACKOFF
    retries = 0
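The body of send_log is collapsed in this view; as background, a minimal sketch of the kind of retry loop that MAX_RETRIES and INITIAL_BACKOFF drive. The exponential doubling, status handling, and function name here are assumptions for illustration, not taken from this diff:

import asyncio

import aiohttp

MAX_RETRIES = 5
INITIAL_BACKOFF = 1  # seconds; the real module defines its own value

async def post_with_retries(session, url, data, headers):
    backoff = INITIAL_BACKOFF
    for attempt in range(MAX_RETRIES):
        try:
            async with session.post(url, data=data, headers=headers) as resp:
                if resp.status < 500:
                    return resp.status
        except aiohttp.ClientError:
            pass  # fall through to the backoff below
        await asyncio.sleep(backoff)
        backoff *= 2  # assumed exponential growth between attempts
    raise RuntimeError("giving up after {} attempts".format(MAX_RETRIES))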
@@ -242,7 +280,8 @@ async def _fetch_data_from_s3(bucket, key, context):
    BATCH_SIZE_FACTOR = _get_batch_size_factor()
    s3MetaData = {
        "invoked_function_arn": context.invoked_function_arn,
-        "s3_bucket_name": bucket
+        "s3_bucket_name": bucket,
+        "s3_key": key
    }
    log_file_url = "s3://{}/{}".format(bucket, key)
    async with aiohttp.ClientSession() as session:
30 changes: 27 additions & 3 deletions template.yml
@@ -11,7 +11,7 @@ Metadata:
ReadmeUrl: README.md
Labels: ['newrelic', 'logs', 'logging', 'ingestion', 'lambda', 's3']
HomePageUrl: https://github.com/newrelic/aws_s3_log_ingestion_lambda
-SemanticVersion: 1.1.4
+SemanticVersion: 1.1.5
SourceCodeUrl: https://github.com/newrelic/aws_s3_log_ingestion_lambda

AWS::CloudFormation::Interface:
@@ -39,17 +39,33 @@ Parameters:
Type: String
Description: A boolean to determine if you want to output debug messages in the CloudWatch console
Default: "false"
+AdditionalAttributes:
+  Type: String
+  Description: "(Optional) A JSON object with string keys and string values, passed as a string. These attributes will be added to the New Relic payload."
+  Default: ""
+S3CloudTrailLogPattern:
+  Type: String
+  Description: "(Optional) Regex pattern to check if the file is from CloudTrail"
+  Default: ""
+S3IgnorePattern:
+  Type: String
+  Description: "(Optional) Regex pattern to ignore files"
+  Default: ""
+BatchSizeFactor:
+  Type: String
+  Description: "(Optional) Indicates the expected compression factor of your logs. Used to check whether logs can be sent to the API (which is limited to 1 MB of compressed data)."
+  Default: ""
FunctionRole:
Type: String
Description: |
(Optional) The ARN of an IAM role to use as this function's execution role. Should provide the AWSLambdaBasicExecutionRole policy.
If not specified, an appropriate Role will be created, which will require CAPABILITY_IAM to be acknowledged.
-Default: ''
+Default: ""
PermissionsBoundary:
Type: String
Description: |
(Optional) The ARN of a permissions boundary to use for this function's execution role. This property works only if the role is generated for you.
-Default: ''
+Default: ""

Conditions:
NoRole: !Equals ['', !Ref FunctionRole]
@@ -72,6 +88,10 @@ Resources:
LICENSE_KEY: !Ref NRLicenseKey
LOG_TYPE: !Ref NRLogType
DEBUG_ENABLED: !Ref DebugEnabled
+S3_CLOUD_TRAIL_LOG_PATTERN: !Ref S3CloudTrailLogPattern
+S3_IGNORE_PATTERN: !Ref S3IgnorePattern
+BATCH_SIZE_FACTOR: !Ref BatchSizeFactor
+ADDITIONAL_ATTRIBUTES: !Ref AdditionalAttributes
PermissionsBoundary: !If [ HasPermissionBoundary, !Ref PermissionsBoundary, !Ref AWS::NoValue ]
Role: !Ref FunctionRole
Policies:
@@ -105,6 +125,10 @@ Resources:
LICENSE_KEY: !Ref NRLicenseKey
LOG_TYPE: !Ref NRLogType
DEBUG_ENABLED: !Ref DebugEnabled
+S3_CLOUD_TRAIL_LOG_PATTERN: !Ref S3CloudTrailLogPattern
+S3_IGNORE_PATTERN: !Ref S3IgnorePattern
+BATCH_SIZE_FACTOR: !Ref BatchSizeFactor
+ADDITIONAL_ATTRIBUTES: !Ref AdditionalAttributes
PermissionsBoundary: !If [ HasPermissionBoundary, !Ref PermissionsBoundary, !Ref AWS::NoValue ]
Policies:
- Version: '2012-10-17'
