[aws][fix]: Update S3 bucket tests for updated implementation (#2281)
1101-1 authored Nov 15, 2024
1 parent 5b8dc07 commit 3fff8b5
Showing 4 changed files with 67 additions and 22 deletions.
2 changes: 1 addition & 1 deletion plugins/aws/fix_plugin_aws/resource/ssm.py
@@ -360,7 +360,7 @@ class AwsSSMResourceCompliance(AwsResource, PhantomBaseResource):
"ssm",
"list-resource-compliance-summaries",
"ResourceComplianceSummaryItems",
{"Filters": [{"Key": "Status", "Values": ["COMPLIANT"], "Type": "EQUAL"}]},
{"Filters": [{"Key": "Status", "Values": ["NON_COMPLIANT"], "Type": "EQUAL"}]},
)
mapping: ClassVar[Dict[str, Bender]] = {
"id": S("Id"),
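
For context on the filter change above: a minimal boto3 sketch of the SSM call this API spec describes, now listing non-compliant resources. boto3, configured AWS credentials, and the printed fields are illustrative assumptions, not part of this change.

    import boto3

    ssm = boto3.client("ssm")
    response = ssm.list_resource_compliance_summaries(
        Filters=[{"Key": "Status", "Values": ["NON_COMPLIANT"], "Type": "EQUAL"}]
    )
    # Each summary item describes one resource's compliance state.
    for item in response.get("ResourceComplianceSummaryItems", []):
        print(item.get("ResourceId"), item.get("Status"))
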
@@ -3,24 +3,36 @@
{
"Id": "bucketsizebytes_aws_s3_bucketname_bucket_1_storagetype_standardstorage_average",
"Label": "BucketSizeBytes",
"Timestamps": [ "2024-04-30T12:50:00+00:00" ],
"Values": [ 1 ],
"Timestamps": [
"2024-04-30T12:50:00+00:00"
],
"Values": [
1
],
"StatusCode": "Complete"
},
{
"Id": "bucketsizebytes_aws_s3_bucketname_bucket_1_storagetype_intelligenttieringstorage_average",
"Label": "BucketSizeBytes",
"Timestamps": [ "2024-04-30T12:50:00+00:00" ],
"Values": [ 2 ],
"Timestamps": [
"2024-04-30T12:50:00+00:00"
],
"Values": [
2
],
"StatusCode": "Complete"
},
{
"Id": "bucketsizebytes_aws_s3_bucketname_bucket_1_storagetype_standardiastorage_average",
"Label": "BucketSizeBytes",
"Timestamps": [ "2024-04-30T12:50:00+00:00" ],
"Values": [ 3 ],
"Timestamps": [
"2024-04-30T12:50:00+00:00"
],
"Values": [
3
],
"StatusCode": "Complete"
}
],
"Messages": []
}
}
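
The fixture above mirrors the shape of a CloudWatch GetMetricData response (Id, Label, Timestamps, Values, StatusCode). As a rough illustration, a query like the following would produce results in that shape; boto3, the region, and the bucket name are assumptions for this sketch, not values taken from the change.

    from datetime import datetime

    import boto3

    cloudwatch = boto3.client("cloudwatch", region_name="us-east-1")
    response = cloudwatch.get_metric_data(
        MetricDataQueries=[
            {
                # Query ids must start with a lowercase letter; the fixture uses a similar scheme.
                "Id": "bucketsizebytes_standardstorage_average",
                "MetricStat": {
                    "Metric": {
                        "Namespace": "AWS/S3",
                        "MetricName": "BucketSizeBytes",
                        "Dimensions": [
                            {"Name": "BucketName", "Value": "bucket-1"},
                            {"Name": "StorageType", "Value": "StandardStorage"},
                        ],
                    },
                    "Period": 86400,  # S3 storage metrics are reported daily
                    "Stat": "Average",
                },
            }
        ],
        StartTime=datetime(2024, 4, 29),
        EndTime=datetime(2024, 5, 1),
    )
    for result in response["MetricDataResults"]:
        print(result["Id"], result["Timestamps"], result["Values"], result["StatusCode"])
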
61 changes: 47 additions & 14 deletions plugins/aws/test/resources/s3_test.py
@@ -1,9 +1,14 @@
from fixlib.graph import Graph
from test.resources import round_trip_for
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timedelta
from types import SimpleNamespace
from typing import cast, Any, Callable
from typing import cast, Any, Callable, List
from fix_plugin_aws.resource.base import AwsRegion, GraphBuilder
from fix_plugin_aws.resource.cloudwatch import update_resource_metrics, AwsCloudwatchMetricData, AwsCloudwatchQuery
from fix_plugin_aws.aws_client import AwsClient
from fix_plugin_aws.resource.s3 import AwsS3Bucket, AwsS3AccountSettings
from fixlib.threading import ExecutorQueue
from fixlib.graph import Graph
from test.resources import round_trip_for


def test_buckets() -> None:
@@ -62,14 +67,42 @@ def validate_delete_args(aws_service: str, fn: Callable[[Any], None]) -> Any:
bucket.delete_resource(client, Graph())


# TODO: fix 'RuntimeError: cannot schedule new futures after shutdown'
# def test_s3_usage_metrics(account_collector: AwsAccountCollector) -> None:
# bucket, builder = round_trip_for(AwsS3Bucket)
# builder.all_regions.update({"us-east-1": AwsRegion(id="us-east-1", name="us-east-1")})
# account_collector.collect_usage_metrics(builder)
# bucket.complete_graph(builder, {})
# assert bucket._resource_usage["standard_storage_bucket_size_bytes"]["avg"] == 1.0
# assert bucket._resource_usage["intelligent_tiering_storage_bucket_size_bytes"]["avg"] == 2.0
# assert bucket._resource_usage["standard_ia_storage_bucket_size_bytes"]["avg"] == 3.0
# # This value is computed internally using the other values. If the number does not match, the logic is broken!
# assert bucket._resource_usage["bucket_size_bytes"]["avg"] == 6.0
def test_s3_usage_metrics() -> None:
bucket, builder = round_trip_for(AwsS3Bucket, "bucket_lifecycle_policy")
builder.all_regions.update({"us-east-1": AwsRegion(id="us-east-1", name="us-east-1")})
queries = bucket.collect_usage_metrics(builder)
lookup_map = {}
lookup_map[bucket.id] = bucket

# simulates the `collect_usage_metrics` method found in `AwsAccountCollector`.
def collect_and_set_metrics(start_at: datetime, region: AwsRegion, queries: List[AwsCloudwatchQuery]) -> None:
with ThreadPoolExecutor(max_workers=1) as executor:
queue = ExecutorQueue(executor, tasks_per_key=lambda _: 1, name="test")
g_builder = GraphBuilder(
builder.graph,
builder.cloud,
builder.account,
region,
{region.id: region},
builder.client,
queue,
builder.core_feedback,
last_run_started_at=builder.last_run_started_at,
)
result = AwsCloudwatchMetricData.query_for_multiple(
g_builder, start_at, start_at + timedelta(hours=2), queries
)
update_resource_metrics(lookup_map, result)
# compute bucket_size_bytes
for after_collect in builder.after_collect_actions:
after_collect()

start = datetime(2020, 5, 30, 15, 45, 30)

collect_and_set_metrics(start, AwsRegion(id="us-east-1", name="us-east-1"), queries)

assert bucket._resource_usage["standard_storage_bucket_size_bytes"]["avg"] == 1.0
assert bucket._resource_usage["intelligent_tiering_storage_bucket_size_bytes"]["avg"] == 2.0
assert bucket._resource_usage["standard_ia_storage_bucket_size_bytes"]["avg"] == 3.0
# This value is computed internally using the other values. If the number does not match, the logic is broken!
assert bucket._resource_usage["bucket_size_bytes"]["avg"] == 6.0
