
Commit

Merge branch 'main' into nm/access-edge-creator-process
meln1k authored Sep 24, 2024
2 parents b8d31e1 + 0c2bc07 commit dcccb29
Showing 33 changed files with 147 additions and 68 deletions.
1 change: 0 additions & 1 deletion fixcore/fixcore/model/exportable_model.py
@@ -19,7 +19,6 @@ def json_export_simple_schema(
with_properties: bool = True,
with_relatives: bool = True,
with_metadata: bool = True,
- aggregate_roots_only: bool = False,
) -> List[Json]:
def export_simple(kind: SimpleKind) -> Json:
result = kind.as_json()
5 changes: 3 additions & 2 deletions fixcore/fixcore/model/graph_access.py
@@ -30,7 +30,7 @@
from fixcore.model.resolve_in_graph import GraphResolver, NodePath, ResolveProp
from fixcore.model.typed_model import from_js
from fixcore.types import Json, EdgeType, JsonElement
- from fixcore.util import utc, utc_str, value_in_path, set_value_in_path, value_in_path_get, path_exists
+ from fixcore.util import utc, utc_str, value_in_path, set_value_in_path, path_exists

log = logging.getLogger(__name__)

@@ -477,6 +477,7 @@ def resolve(self) -> None:

def __resolve_count_descendants(self) -> None:
visited: Set[str] = set()
+ empty_set: Set[str] = set()

def count_successors_by(node_id: NodeId, edge_type: EdgeType, path: List[str]) -> Dict[str, int]:
result: Dict[str, int] = {}
@@ -487,7 +488,7 @@ def count_successors_by(node_id: NodeId, edge_type: EdgeType, path: List[str]) -
if elem_id not in visited:
visited.add(elem_id)
elem = self.nodes[elem_id]
- if not value_in_path_get(elem, NodePath.is_phantom, False):
+ if "phantom" not in elem.get("kinds_set", empty_set):
extracted = value_in_path(elem, path)
if isinstance(extracted, str):
result[extracted] = result.get(extracted, 0) + 1
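The net effect of this change: __resolve_count_descendants no longer reads the removed metadata.phantom flag but checks whether "phantom" appears in the node's kinds_set. A minimal, self-contained sketch of that check (not the fixcore implementation; the node dictionaries and kinds are made-up examples reduced to the fields used above):

    from typing import Dict, Set

    def count_non_phantom_by_kind(nodes: Dict[str, dict]) -> Dict[str, int]:
        empty_set: Set[str] = set()
        counts: Dict[str, int] = {}
        for elem in nodes.values():
            # previously: value_in_path_get(elem, ["metadata", "phantom"], False)
            if "phantom" in elem.get("kinds_set", empty_set):
                continue  # phantom kinds are excluded from descendant counts
            kind = elem.get("reported", {}).get("kind", "unknown")
            counts[kind] = counts.get(kind, 0) + 1
        return counts

    nodes = {
        "n1": {"reported": {"kind": "aws_ec2_instance"}, "kinds_set": {"aws_ec2_instance"}},
        "n2": {"reported": {"kind": "aws_price"}, "kinds_set": {"aws_price", "phantom"}},
    }
    print(count_non_phantom_by_kind(nodes))  # -> {'aws_ec2_instance': 1}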
2 changes: 1 addition & 1 deletion fixcore/fixcore/model/model_handler.py
@@ -256,7 +256,7 @@ def code_model() -> Model:
The model is only loaded on demand and only once.
"""
load_plugin_classes()
- return Model.from_kinds([from_js(m, Kind) for m in export_model()]) # type: ignore
+ return Model.from_kinds([from_js(m, Kind) for m in export_model(with_kind_description=True)]) # type: ignore


class ModelHandlerFromCodeAndDB(ModelHandlerDB):
1 change: 0 additions & 1 deletion fixcore/fixcore/model/resolve_in_graph.py
@@ -20,7 +20,6 @@ class NodePath:
descendant_summary = ["metadata", "descendant_summary"]
descendant_count = ["metadata", "descendant_count"]
python_type = ["metadata", "python_type"]
- is_phantom = ["metadata", "phantom"]
from_node = ["from"]
to_node = ["to"]
edge_type = ["edge_type"]
9 changes: 0 additions & 9 deletions fixcore/fixcore/static/api-doc.yaml
@@ -780,7 +780,6 @@ paths:
name: root
metadata:
cleaned: false
- phantom: true
protected: false
application/yaml:
example: |
@@ -795,7 +794,6 @@
metadata:
python_type: fixlib.baseresources.GraphRoot
cleaned: false
- phantom: true
protected: false
kinds:
- graph_root
@@ -813,7 +811,6 @@
metadata:
python_type: fixlib.baseresources.Cloud
cleaned: false
- phantom: false
protected: false
descendant_summary:
onelogin_account: 1
@@ -983,7 +980,6 @@ paths:
name: root
metadata:
cleaned: false
- phantom: true
protected: false
application/yaml:
example: |
@@ -998,7 +994,6 @@
metadata:
python_type: fixlib.baseresources.GraphRoot
cleaned: false
- phantom: true
protected: false
kinds:
- graph_root
@@ -1016,7 +1011,6 @@
metadata:
python_type: fixlib.baseresources.Cloud
cleaned: false
- phantom: false
protected: false
descendant_summary:
onelogin_account: 1
@@ -1454,7 +1448,6 @@ paths:
name: root
metadata:
cleaned: false
- phantom: true
protected: false
application/yaml:
example: |
@@ -1469,7 +1462,6 @@
metadata:
python_type: fixlib.baseresources.GraphRoot
cleaned: false
- phantom: true
protected: false
kinds:
- graph_root
@@ -1487,7 +1479,6 @@
metadata:
python_type: fixlib.baseresources.Cloud
cleaned: false
- phantom: false
protected: false
descendant_summary:
onelogin_account: 1
9 changes: 6 additions & 3 deletions fixcore/tests/fixcore/model/graph_access_test.py
@@ -201,8 +201,11 @@ def add_node(node_id: NodeId) -> None:
"kind": kind,
"some": {"deep": {"nested": node_id}},
}
+ kinds = [kind]
+ metadata = {}
# for sake of testing: declare parent as phantom resource
- metadata = {"phantom": True} if kind == "parent" else {}
+ if kind == "parent":
+ kinds.append("phantom")
if node_id.startswith(replace_on):
metadata["replace"] = True
g.add_node(
@@ -211,8 +214,8 @@ def add_node(node_id: NodeId) -> None:
reported=reported,
metadata=metadata,
kind=kind,
- kinds=[kind],
- kinds_set={kind},
+ kinds=kinds,
+ kinds_set=set(kinds),
)

def add_edge(from_node: str, to_node: str, edge_type: EdgeType = EdgeTypes.default) -> None:
14 changes: 4 additions & 10 deletions fixlib/fixlib/baseresources.py
@@ -254,13 +254,9 @@ class BaseResource(ABC):
the resource within the Graph. The name is used for display purposes. Tags are
key/value pairs that get exported in the GRAPHML view.
- There's also class variables, kind, phantom and reference_kinds.
+ There's also class variables, kind, and reference_kinds.
`kind` is a string describing the type of resource, e.g. 'aws_ec2_instance'
or 'some_cloud_load_balancer'.
- `phantom` is a bool describing whether the resource actually exists within
- the cloud or if it's just a phantom resource like pricing information
- or usage quota. I.e. some information relevant to the cloud account
- but not actually existing in the form of a usable resource.
`reference_kinds` is a list of kinds that can be connected to this resource for
the related edge type as successor or predecessor.
"""
@@ -269,7 +265,6 @@ class BaseResource(ABC):
kind_display: ClassVar[str] = "Resource"
kind_description: ClassVar[str] = "A generic resource."
kind_service: ClassVar[Optional[str]] = None
- phantom: ClassVar[bool] = False
reference_kinds: ClassVar[ModelReference] = {}
metadata: ClassVar[Dict[str, Any]] = {"icon": "resource", "group": "misc"}

@@ -456,7 +451,7 @@ def clean(self) -> bool:
@clean.setter
@unless_protected
def clean(self, value: bool) -> None:
- if self.phantom and value:
+ if isinstance(self, PhantomBaseResource) and value:
raise ValueError(f"Can't cleanup phantom resource {self.rtdname}")

clean_str = "" if value else "not "
@@ -492,7 +487,7 @@ def protected(self, value: bool) -> None:
@metrics_resource_cleanup.time()
@unless_protected
def cleanup(self, graph: Optional[Any] = None) -> bool:
- if self.phantom:
+ if isinstance(self, PhantomBaseResource):
raise RuntimeError(f"Can't cleanup phantom resource {self.rtdname}")

if self.cleaned:
@@ -541,7 +536,7 @@ def pre_cleanup(self, graph: Optional[Any] = None) -> bool:
if graph is None:
graph = self._graph

- if self.phantom:
+ if isinstance(self, PhantomBaseResource):
raise RuntimeError(f"Can't cleanup phantom resource {self.rtdname}")

if self.cleaned:
@@ -760,7 +755,6 @@ class PhantomBaseResource(BaseResource):
kind: ClassVar[str] = "phantom_resource"
kind_display: ClassVar[str] = "Phantom Resource"
kind_description: ClassVar[str] = "A generic phantom resource."
- phantom: ClassVar[bool] = True

def update_tag(self, key: str, value: str) -> bool:
log.error(f"Resource {self.rtdname} is a phantom resource and does not maintain tags")
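With the phantom ClassVar removed, a resource is phantom purely by subclassing PhantomBaseResource, and cleanup code branches on the class hierarchy instead of a flag. A hedged sketch of the new pattern (MyPricingInfo is a hypothetical resource, not part of fixlib; only BaseResource and PhantomBaseResource are taken from the diff above):

    from typing import ClassVar

    from fixlib.baseresources import BaseResource, PhantomBaseResource


    class MyPricingInfo(PhantomBaseResource):
        # hypothetical phantom resource: pricing data that appears in the graph
        # but does not exist as a deletable cloud resource
        kind: ClassVar[str] = "my_pricing_info"


    def can_be_cleaned(resource: BaseResource) -> bool:
        # replaces the old `if resource.phantom:` checks
        return not isinstance(resource, PhantomBaseResource)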
21 changes: 14 additions & 7 deletions fixlib/fixlib/core/model_export.py
@@ -150,7 +150,8 @@ def dataclasses_to_fixcore_model(
aggregate_root: Optional[Type[Any]] = None,
walk_subclasses: bool = True,
use_optional_as_required: bool = False,
- with_description: bool = True,
+ with_kind_description: bool = False,
+ with_prop_description: bool = False,
) -> List[Json]:
"""
Analyze all transitive dataclasses and create the model
@@ -163,7 +164,8 @@
:param aggregate_root: if a type is a subtype of this type, it will be considered an aggregate root.
:param walk_subclasses: if true, all subclasses of the given classes will be analyzed as well.
:param use_optional_as_required: if true, all non-optional fields will be considered required.
- :param with_description: if true, include the description for classes and properties.
+ :param with_kind_description: if true, include the description for classes.
+ :param with_prop_description: if true, include the description for properties.
:return: the model definition in the fixcore json format.
"""

@@ -176,7 +178,7 @@ def prop(field: Attribute) -> List[Json]: # type: ignore
meta = field.metadata.copy()
kind = meta.pop("type_hint", model_name(field.type))
desc = meta.pop("description", None)
- desc = desc if with_description else None
+ desc = desc if with_prop_description else None
required = meta.pop("required", use_optional_as_required and not is_optional(field.type)) # type: ignore
synthetic = meta.pop("synthetic", None)
synthetic = synthetic if synthetic else {}
@@ -253,7 +255,13 @@ def export_data_class(clazz: type) -> None:
metadata["service"] = s
if (slc := getattr(clazz, "categories", None)) and callable(slc) and (sl := slc()):
metadata["categories"] = sl
- if with_description and (s := clazz.__dict__.get("kind_description", None)) and isinstance(s, str):
+ if ( # only export kind description on aggregate roots
+ with_kind_description
+ and (ar := aggregate_root)
+ and issubclass(clazz, ar)
+ and (s := clazz.__dict__.get("kind_description", None))
+ and isinstance(s, str)
+ ):
metadata["description"] = s

model.append(
@@ -293,9 +301,9 @@ def literal_name(en: Enum) -> str:
# Use this model exporter, if a dynamic object is exported
# with given name and properties.
def dynamic_object_to_fixcore_model(
- name: str, properties: Dict[str, type], aggregate_root: bool = True, traverse_dependant: bool = True
+ name: str, properties: Dict[str, type], aggregate_root: bool = True, traverse_dependant: bool = True, **kwargs: Any
) -> List[Json]:
- dependant = dataclasses_to_fixcore_model(set(properties.values())) if traverse_dependant else []
+ dependant = dataclasses_to_fixcore_model(set(properties.values()), **kwargs) if traverse_dependant else []
# append definition for top level object
dependant.append(
{
@@ -331,7 +339,6 @@ def node_to_dict(node: BaseResource, changes_only: bool = False, include_revisio
"metadata": {
"python_type": type_str(node),
"cleaned": node.cleaned,
"phantom": node.phantom,
"protected": node.protected,
"categories": node.categories(),
**node._metadata,
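Callers that previously passed with_description now choose the two new flags separately, and kind descriptions are only emitted for aggregate roots. A hedged usage sketch, assuming only the signatures shown in this diff (ExampleConfig is a made-up attrs class, not part of fixlib):

    from typing import ClassVar, Optional

    from attrs import define, field

    from fixlib.core.model_export import dataclasses_to_fixcore_model


    @define
    class ExampleConfig:
        kind: ClassVar[str] = "example_config"
        kind_description: ClassVar[str] = "An example configuration."
        region: Optional[str] = field(default=None, metadata={"description": "Deployment region."})


    model = dataclasses_to_fixcore_model(
        {ExampleConfig},
        aggregate_root=ExampleConfig,  # kind descriptions are only kept for aggregate roots
        with_kind_description=True,    # was: with_description=True (class descriptions)
        with_prop_description=True,    # was: with_description=True (property descriptions)
    )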
2 changes: 1 addition & 1 deletion fixlib/fixlib/graph/__init__.py
@@ -398,7 +398,7 @@ def export_model(graph: Optional[Graph] = None, **kwargs: Any) -> List[Json]:
for node in graph.nodes:
classes.add(type(node))

- model = resource_classes_to_fixcore_model(classes, aggregate_root=BaseResource, with_description=False, **kwargs)
+ model = resource_classes_to_fixcore_model(classes, aggregate_root=BaseResource, **kwargs)
for resource_model in model:
if resource_model.get("fqn") == "resource":
resource_model.get("properties", []).append(
9 changes: 7 additions & 2 deletions fixlib/test/core/model_export_test.py
@@ -125,7 +125,7 @@ def test_enum_to_model() -> None:


def test_dataclasses_to_fixcore_model() -> None:
- result = dataclasses_to_fixcore_model({DataClassExample})
+ result = dataclasses_to_fixcore_model({DataClassExample}, with_kind_description=True, with_prop_description=True)
assert len(result) == 5
for r in result:
props = {p["name"]: p for p in r.get("properties", [])}
@@ -187,7 +187,12 @@ class GcpTestConfigConfig:
def test_config_export():
# Let's assume a dynamic top level object of name Config
# The properties are defined by name and related type.
- result = dynamic_object_to_fixcore_model("config", {"aws": AwsTestConfig, "gcp": GcpTestConfigConfig})
+ result = dynamic_object_to_fixcore_model(
+ "config",
+ {"aws": AwsTestConfig, "gcp": GcpTestConfigConfig},
+ with_kind_description=True,
+ with_prop_description=True,
+ )
result_dict = {a["fqn"]: a for a in result}
assert len(result_dict["gcp_config"]["properties"]) == 1
assert len(result_dict["aws_config"]["properties"]) == 2
5 changes: 3 additions & 2 deletions plugins/aws/fix_plugin_aws/__init__.py
@@ -21,6 +21,7 @@
Cloud,
metrics_resource_cleanup_exceptions,
metrics_resource_pre_cleanup_exceptions,
+ PhantomBaseResource,
)
from fixlib.config import Config, RunningConfig
from fixlib.core.actions import CoreFeedback
@@ -272,7 +273,7 @@ def pre_cleanup(config: Config, resource: BaseResource, graph: Graph) -> bool:
resource.log("Modification was requested even though resource is protected" " - refusing")
return False

- if resource.phantom:
+ if isinstance(resource, PhantomBaseResource):
log.warning(f"Can't cleanup phantom resource {resource.rtdname}")
return False

@@ -314,7 +315,7 @@ def pre_cleanup(config: Config, resource: BaseResource, graph: Graph) -> bool:
@staticmethod
def cleanup(config: Config, resource: BaseResource, graph: Graph) -> bool:
if isinstance(resource, AwsResource):
- if resource.phantom:
+ if isinstance(resource, PhantomBaseResource):
raise RuntimeError(f"Can't cleanup phantom resource {resource.rtdname}")

if resource.cleaned:
16 changes: 16 additions & 0 deletions plugins/aws/fix_plugin_aws/resource/backup.py
@@ -1,6 +1,7 @@
import logging
from datetime import datetime
from typing import Any, ClassVar, Dict, Optional, List, Type
+ from json import loads as json_loads

from attrs import define, field

@@ -18,6 +19,7 @@
from fixlib.graph import Graph
from fixlib.json_bender import F, Bender, S, ForallBend, Bend
from fixlib.types import Json
+ from fixlib.json import sort_json

log = logging.getLogger("fix.plugins.aws")
service_name = "backup"
@@ -345,13 +347,15 @@ class AwsBackupVault(BackupResourceTaggable, AwsResource):
min_retention_days: Optional[int] = field(default=None, metadata={"description": "The Backup Vault Lock setting that specifies the minimum retention period that the vault retains its recovery points. If this parameter is not specified, Vault Lock does not enforce a minimum retention period. If specified, any backup or copy job to the vault must have a lifecycle policy with a retention period equal to or longer than the minimum retention period. If the job's retention period is shorter than that minimum retention period, then the vault fails the backup or copy job, and you should either modify your lifecycle settings or use a different vault. Recovery points already stored in the vault prior to Vault Lock are not affected."}) # fmt: skip
max_retention_days: Optional[int] = field(default=None, metadata={"description": "The Backup Vault Lock setting that specifies the maximum retention period that the vault retains its recovery points. If this parameter is not specified, Vault Lock does not enforce a maximum retention period on the recovery points in the vault (allowing indefinite storage). If specified, any backup or copy job to the vault must have a lifecycle policy with a retention period equal to or shorter than the maximum retention period. If the job's retention period is longer than that maximum retention period, then the vault fails the backup or copy job, and you should either modify your lifecycle settings or use a different vault. Recovery points already stored in the vault prior to Vault Lock are not affected."}) # fmt: skip
lock_date: Optional[datetime] = field(default=None, metadata={"description": "The date and time when Backup Vault Lock configuration becomes immutable, meaning it cannot be changed or deleted. If you applied Vault Lock to your vault without specifying a lock date, you can change your Vault Lock settings, or delete Vault Lock from the vault entirely, at any time. This value is in Unix format, Coordinated Universal Time (UTC), and accurate to milliseconds. For example, the value 1516925490.087 represents Friday, January 26, 2018 12:11:30.087 AM."}) # fmt: skip
+ vault_policy: Optional[Json] = field(default=None)

@classmethod
def called_collect_apis(cls) -> List[AwsApiSpec]:
return [
cls.api_spec,
AwsApiSpec(service_name, "list-tags"),
AwsApiSpec(service_name, "list-recovery-points-by-backup-vault"),
+ AwsApiSpec(service_name, "get-backup-vault-access-policy"),
]

@classmethod
@@ -394,11 +398,23 @@ def add_tags(backup_plan: AwsBackupVault) -> None:
for tag in tags:
backup_plan.tags.update(tag)

+ def add_vault_policy(vault: AwsBackupVault) -> None:
+ with builder.suppress(f"{service_name}.get-backup-vault-access-policy"):
+ if raw_policy := builder.client.get(
+ service_name,
+ "get-backup-vault-access-policy",
+ "Policy",
+ BackupVaultName=vault.name,
+ expected_errors=["ResourceNotFoundException"],
+ ):
+ vault.vault_policy = sort_json(json_loads(raw_policy), sort_list=True) # type: ignore

for js in json:
if instance := cls.from_api(js, builder):
builder.add_node(instance, js)
builder.submit_work(service_name, collect_recovery_points, instance)
builder.submit_work(service_name, add_tags, instance)
+ builder.submit_work(service_name, add_vault_policy, instance)


@define(eq=False, slots=False)
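The newly collected vault access policy is parsed from the raw JSON string and normalized with sort_json before being stored on vault_policy, presumably so repeated collections of the same policy yield a stable representation. A hedged illustration of that normalization step in isolation (the policy document is a made-up example; sort_json is used only as it appears in the diff above):

    from json import loads as json_loads

    from fixlib.json import sort_json

    raw_policy = (
        '{"Version": "2012-10-17", "Statement": [{"Sid": "deny-delete", "Effect": "Deny", '
        '"Principal": "*", "Action": ["backup:DeleteRecoveryPoint"], "Resource": "*"}]}'
    )
    # dictionary keys (and, with sort_list=True, list entries) come back in a stable order
    normalized = sort_json(json_loads(raw_policy), sort_list=True)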
(Diffs for the remaining changed files were not loaded.)
