diff --git a/.yamllint b/.yamllint index 3adaf90..c31f456 100644 --- a/.yamllint +++ b/.yamllint @@ -12,4 +12,7 @@ rules: brackets: max-spaces-inside: 1 level: error + document-start: disable + indentation: disable line-length: disable + truthy: disable diff --git a/gouttelette.yml b/gouttelette.yml new file mode 100644 index 0000000..5980be4 --- /dev/null +++ b/gouttelette.yml @@ -0,0 +1,2 @@ +--- +generator: "amazon_cloud_code_generator" diff --git a/gouttelette/cmd/content_library_data.py b/gouttelette/cmd/content_library_data.py new file mode 100644 index 0000000..0ca63a4 --- /dev/null +++ b/gouttelette/cmd/content_library_data.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python + +content_library_static_ds = { + "{@term field}": "field", + "{@name Vcenter}": "vCenter", + "{@link LibraryModel}": "Library", + "{@term unset}": "not set", + "{@link StorageBacking}": "storage backing", + "{@name LibraryModel}": "Library", + "{@link PublishInfo}": "C(publish_info)", + "{@link PublishInfo#published}": "C(publish_info.published)", + "{@link SubscriptionInfo}": "C(subscription_info)", + "{@link SubscriptionInfo#automaticSyncEnabled}": "C(subscription_info.automaticSyncEnabled)", + "{@link SubscriptionInfo#subscriptionUrl}": "C(subscription_info.subscriptionurl)", + "{@link LibraryModel#name}": "C(Library name)", + "{@link StorageBacking#storageUri}": "C(storage_backings.storage_uri)", + "{@link PublishInfo#persistJsonEnabled}": "C(publish_info.persist_json_enabled)", + "{@link PublishInfo#publishUrl}": "C(publish_info.publish_url)", + "{@link ConfigurationModel#automaticSyncEnabled}": "C(configuration_model.automatic_sync_enabled)", + "{@link SubscribedLibrary#sync}": "M(vmware.vmware_rest.content_subscribedlibrary) with C(state=sync)", + "{@link SubscribedItem#sync}": "M(vmware.vmware_rest.content_library_item) with C(state=sync)", + "{@link SubscribedItem#evict}": "M(vmware.vmware_rest.content_library_item) with C(state=sync)", + "{@link ItemModel}": "item", + "{@link Processes#create}": "process", +} diff --git a/gouttelette/cmd/generator.py b/gouttelette/cmd/generator.py new file mode 100644 index 0000000..3dc865b --- /dev/null +++ b/gouttelette/cmd/generator.py @@ -0,0 +1,401 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# Copyright: (c) 2022, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + + +import copy +import re +from typing import Iterable, List, Dict +from gouttelette.utils import ( + python_type, + get_module_from_config, + scrub_keys, + camel_to_snake, + ensure_description, +) + + +class Description: + @classmethod + def normalize(cls, string: str, definitions: Iterable = {}) -> List[str]: + with_no_line_break: List[str] = [] + sentences = re.split(r"(?<=[^A-Z].[.?]) +(?=[A-Z])", string) + sentences[:] = [x for x in sentences if x] + + for line in sentences: + if "\n" in line: + splitted = line.split("\n") + splitted[:] = [x for x in splitted if x] + with_no_line_break += splitted + else: + with_no_line_break.append(line) + + with_no_line_break = [cls.clean_up(definitions, i) for i in with_no_line_break] + + return with_no_line_break + + @classmethod + def clean_up(cls, definitions: Iterable, my_string: str) -> str: + values = set() + keys = set() + + ignored_values = set(["PUT", "S3", "EC2"]) + + def get_keys(a_dict): + keys = [] + if isinstance(a_dict, list): + for item in a_dict: + keys.extend(get_keys(item)) + elif isinstance(a_dict, dict): + for key in a_dict: + keys.append(key) + keys.extend(get_keys(a_dict[key])) + 
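+            # at the top level this returns every key name found at any
+            # depth of the schema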
return keys + + def get_values(a_dict): + for key, value in a_dict.items(): + if isinstance(value, dict): + yield from get_values(value) + else: + if key in ("choices", "enum"): + yield value + + for value in get_values(definitions): + values |= set(value) + + keys = set(get_keys(definitions)) + + def rewrite_name(matchobj): + """Rewrite option name to I(camel_to_snake(option))""" + name = matchobj.group(0) + if name in keys: + snake_name = camel_to_snake(name) + output = f"I({snake_name})" + return output + return name + + def rewrite_value(matchobj): + """Find link and replace it with U(link)""" + name = matchobj.group(0) + if name.isalpha(): + if name in values and name not in ignored_values: + output = f"C({name})" + return output + else: + if name not in ignored_values: + output = f"C({name})" + return output + return name + + def rewrite_link(matchobj): + """Find link and replace it with U(link).""" + name = matchobj.group(0) + output = f"U({name})" + return output + + def find_match(pattern, my_string): + """Find matching string using a pattern and rewrite it as needed.""" + matches = re.findall(pattern, my_string) + if matches: + output = re.sub(r"\d+", rewrite_value, my_string) + output = re.sub(r"(? Iterable: + list_of_keys_to_remove = [ + "additionalProperties", + "insertionOrder", + "uniqueItems", + "pattern", + "examples", + "maxLength", + "minLength", + "format", + "minimum", + "maximum", + "patternProperties", + "maxItems", + "minItems", + ] + self.replace_keys(self.options, self.definitions) + self.cleanup_required(self.options) + sanitized_options: Iterable = camel_to_snake( + scrub_keys(self.options, list_of_keys_to_remove) + ) + + """ + For all the options with a missing description field returned by the API + we make sure to add "description": "Not Provided." to allow + ansible-doc -t module amazon.cloud.module_name to succeed. + + Without this workaround, sanity tests fail with (even if the ignore files + are populated with "validate-modules:invalid-documentation"): + + >>> Standard Error + ERROR! 
Unable to retrieve documentation from 'amazon.cloud.module_name' due to: + All (sub-)options and return values must have a 'description' field + """ + sanitized_options: Iterable = ensure_description( + sanitized_options, "description" + ) + + return sanitized_options + + +def generate_documentation( + module: object, added_ins: Dict, next_version: str, target_dir: str +) -> Iterable: + """Format and generate the AnsibleModule documentation""" + + module_name = module.name + documentation: Iterable = { + "module": module_name, + "author": "Ansible Cloud Team (@ansible-collections)", + "description": [], + "short_description": [], + "options": {}, + "version_added": added_ins["module"] or next_version, + "extends_documentation_fragment": [ + "amazon.aws.aws", + "amazon.aws.ec2", + "amazon.cloud.boto3", + ], + } + + docs = Documentation() + docs.options = module.schema.get("properties", {}) + docs.definitions = module.schema.get("definitions", {}) + + # Properties defined as required must be specified in the desired state during resource creation + docs.required = module.schema.get("required", []) + + # Properties defined as readOnlyProperties can't be set by users + docs.read_only_properties = module.schema.get("readOnlyProperties", []) + + docs.primary_identifier = module.schema.get("primaryIdentifier", []) + + # Properties defined as writeOnlyProperties can be specified by users when creating or updating a + # resource but can't be returned during a read or list requested + # write_only_properties = module.schema.get("readOnlyProperties") + + documentation["options"] = docs.preprocess() + documentation["options"].update( + { + "state": { + "description": [ + "Goal state for resource.", + "I(state=present) creates the resource if it doesn't exist, or updates to the provided state if the resource already exists.", + "I(state=absent) ensures an existing instance is deleted.", + "I(state=list) get all the existing resources.", + "I(state=describe) or I(state=get) retrieves information on an existing resource.", + ], + "type": "str", + "choices": ["present", "absent", "list", "describe", "get"], + "default": "present", + }, + "wait": { + "description": ["Wait for operation to complete before returning."], + "type": "bool", + "default": False, + }, + "wait_timeout": { + "description": [ + "How many seconds to wait for an operation to complete before timing out.", + ], + "type": "int", + "default": 320, + }, + "force": { + "description": [ + "Cancel IN_PROGRESS and PENDING resource requestes.", + "Because you can only perform a single operation on a given resource at a time, there might be cases where you need to cancel the current resource operation to make the resource available so that another operation may be performed on it.", + ], + "type": "bool", + "default": False, + }, + } + ) + + # module.schema.get("taggable") is not returned always (even if the resource supports tagging) + if module.schema.get("taggable") or documentation["options"].get("tags"): + documentation["options"]["tags"] = { + "description": [ + "A dict of tags to apply to the resource.", + "To remove all tags set I(tags={}) and I(purge_tags=true).", + ], + "type": "dict", + "aliases": ["resource_tags"], + } + documentation["options"]["purge_tags"] = { + "description": ["Remove tags not listed in I(tags)."], + "type": "bool", + "default": True, + } + + if len(docs.primary_identifier) > 1: + # If a resource has more than one primary identifier, the user can decide to either + # specify all the primary identifiers or use the 
identifier parameter as a string + # consisting of the multiple identifiers strung together + # https://docs.aws.amazon.com/cloudcontrolapi/latest/userguide/resource-identifier.html + documentation["options"]["identifier"] = { + "description": [ + "For compound primary identifiers, to specify the primary identifier as a string, list each in the order that they are specified in the identifier list definition, separated by '|'.", + "For more details, visit U(https://docs.aws.amazon.com/cloudcontrolapi/latest/userguide/resource-identifier.html).", + ], + "type": "str", + } + + module_from_config = get_module_from_config(module_name, target_dir) + if module_from_config and "documentation" in module_from_config: + for k, v in module_from_config["documentation"].items(): + documentation[k] = v + + return documentation + + +class CloudFormationWrapper: + """Encapsulates Amazon CloudFormation operations.""" + + def __init__(self, client): + """ + :param client: A Boto3 CloudFormation client + """ + self.client = client + + def generate_docs(self, type_name: str): + """ + Equivalent to + aws cloudformation describe-type \ + --type-name My::Logs::LogGroup \ + --type RESOURCE + """ + # TODO: include version + response = self.client.describe_type(Type="RESOURCE", TypeName=type_name) + + return response.get("Schema") diff --git a/gouttelette/cmd/refresh_modules.py b/gouttelette/cmd/refresh_modules.py new file mode 100644 index 0000000..3ac2c82 --- /dev/null +++ b/gouttelette/cmd/refresh_modules.py @@ -0,0 +1,1350 @@ +#!/usr/bin/env python3 + + +import argparse +import json + +import pathlib +import re +import shutil +import pkg_resources +from pbr.version import VersionInfo +from .content_library_data import content_library_static_ds +import yaml +import json +import copy + +from gouttelette.utils import ( + format_documentation, + indent, + UtilsBase, + jinja2_renderer, + get_module_added_ins, + get_module_from_config, + python_type, + get_generator, + camel_to_snake, + ignore_description, +) + +from typing import Dict, Iterable, List, DefaultDict, Union, Optional, TypeVar, Type + +from .resources import RESOURCES +from .generator import generate_documentation + + +# vmware specific +def normalize_parameter_name(name: str): + # the in-query filter.* parameters are not valid Python variable names. + # We replace the . 
with a _ to avoid problems.
+    return name.replace("filter.", "filter_")  # < 7.0.2
+
+
+def ansible_state(operationId: str, default_operationIds: Optional[str] = None) -> str:
+    mapping = {
+        "update": "present",
+        "delete": "absent",
+        "create": "present",
+    }
+    # in this case, we don't want to see 'create' in the
+    # "Required with" list
+    if (
+        default_operationIds
+        and operationId == "update"
+        and "create" not in default_operationIds
+    ):
+        return
+    if operationId in mapping:
+        return mapping[operationId]
+    else:
+        return operationId
+
+
+class_description = TypeVar("class_description", bound="Description")
+
+
+class Description:
+    @classmethod
+    def normalize(cls: Type[class_description], string_list: List) -> List:
+        if not isinstance(string_list, list):
+            raise TypeError
+
+        with_no_line_break = []
+        for line in string_list:
+            if "\n" in line:
+                with_no_line_break += line.split("\n")
+            else:
+                with_no_line_break.append(line)
+
+        with_no_line_break = [cls.write_M(i) for i in with_no_line_break]
+        with_no_line_break = [cls.write_I(i) for i in with_no_line_break]
+        with_no_line_break = [cls.clean_up(i) for i in with_no_line_break]
+        return with_no_line_break
+
+    @classmethod
+    def clean_up(cls: Type[class_description], my_string: str) -> str:
+        def rewrite_name(matchobj):
+            name = matchobj.group(1)
+            snake_name = cls.to_snake(name)
+            if snake_name[0] == "#":  # operationId:
+                output = f"C({ansible_state(snake_name[1:])})"
+            else:
+                output = f"C({snake_name})"
+            return output
+
+        def rewrite_link(matchobj: str) -> str:
+            name = matchobj.group(1)
+            if "#" in name and name.split("#")[0]:
+                output = name.split("#")[1]
+            else:
+                output = name
+            return output
+
+        my_string = my_string.replace(" {@term enumerated type}", "")
+        my_string = my_string.replace(" {@term list}", "list")
+        my_string = my_string.replace(" {@term operation}", "operation")
+        my_string = re.sub(r"{@name DayOfWeek}", "day of the week", my_string)
+        my_string = re.sub(r": The\s\S+\senumerated type", ": This option", my_string)
+        my_string = re.sub(r"\n\n", " ", my_string)
+        my_string = re.sub(r" See {@.*}.", "", my_string)
+        my_string = re.sub(r"\({@.*?\)", "", my_string)
+        my_string = re.sub(r"{@code true}", "C(True)", my_string)
+        my_string = re.sub(r"{@code false}", "C(False)", my_string)
+        my_string = re.sub(r"{@code\s+?(.*?)}", r"C(\1)", my_string)
+        my_string = re.sub(r"{@param.name\s+?([^}]*)}", rewrite_name, my_string)
+        my_string = re.sub(r"{@name\s+?([^}]*)}", rewrite_name, my_string)
+        # NOTE: it's pretty much impossible to build something useful
+        # automatically.
+        # my_string = re.sub(r"{@link\s+?([^}]*)}", rewrite_link, my_string)
+        for k in content_library_static_ds:
+            my_string = re.sub(k, content_library_static_ds[k], my_string)
+        return my_string
+
+    @classmethod
+    def to_snake(cls: Type[class_description], camel_case: str) -> str:
+        camel_case = camel_case.replace("DNS", "dns")
+        return re.sub(r"(?<!^)(?=[A-Z])", "_", camel_case).lower()
+
+    @classmethod
+    def ref_to_parameter(cls: Type[class_description], ref: str) -> str:
+        splitted = ref.split(".")
+        my_parameter = splitted[-1].replace("-", "_")
+        return cls.to_snake(my_parameter)
+
+    @classmethod
+    def write_I(cls: Type[class_description], my_string: str) -> str:
+        refs = {
+            cls.ref_to_parameter(i): i
+            for i in re.findall(r"[A-Z][\w+]+\.[A-Z][\w+\.-]+", my_string)
+        }
+        for parameter_name in sorted(refs.keys(), key=len, reverse=True):
+            ref = refs[parameter_name]
+            my_string = my_string.replace(ref, f"I({parameter_name})")
+        return my_string
+
+    @classmethod
+    def write_M(cls: Type[class_description], my_string: str) -> str:
+        my_string = re.sub(r"When operations return.*\.($|\s)", "", my_string)
+        m = re.search(r"resource type:\s([a-zA-Z][\w\.]+[a-z])", my_string)
+        mapping = {
+            "ClusterComputeResource": "vcenter_cluster_info",
+            "Datacenter": "vcenter_datacenter_info",
+            "Datastore": "vcenter_datastore_info",
+            "Folder": "vcenter_folder_info",
+            "HostSystem": "vcenter_host_info",
+            "Network": "vcenter_network_info",
+            "ResourcePool": "vcenter_resourcepool_info",
+            "vcenter.StoragePolicy": "vcenter_storage_policies",
+            "vcenter.vm.hardware.Cdrom": "vcenter_vm_hardware_cdrom",
+            "vcenter.vm.hardware.Disk": "vcenter_vm_hardware_disk",
+            "vcenter.vm.hardware.Ethernet": "vcenter_vm_hardware_ethernet",
+            "vcenter.vm.hardware.Floppy": "vcenter_vm_hardware_floppy",
+            "vcenter.vm.hardware.ParallelPort": "vcenter_vm_hardware_parallel",
+            "vcenter.vm.hardware.SataAdapter": "vcenter_vm_hardware_adapter_sata",
+            "vcenter.vm.hardware.ScsiAdapter": "vcenter_vm_hardware_adapter_scsi",
+            "vcenter.vm.hardware.SerialPort": "vcenter_vm_hardware_serial",
+            "VirtualMachine": "vcenter_vm_info",
+            "infraprofile.profile": "appliance_infraprofile_configs",
+            "appliance.vmon.Service": "appliance_vmon_service",
+        }
+
+        if not m:
+            return my_string
+
+        resource_name = m.group(1)
+        try:
+            module_name = mapping[resource_name]
+        except KeyError:
+            print(f"No mapping for {resource_name}")
+            raise
+
+        if f"must be an identifier for the resource type: {resource_name}" in my_string:
+            return my_string.replace(
+                f"must be an identifier for the resource type: {resource_name}",
+                f"must be the id of a resource returned by M(vmware.vmware_rest.{module_name})",
+            )
+        if f"identifiers for the resource type: {resource_name}" in my_string:
+            return my_string.replace(
+                f"identifiers for the resource type: {resource_name}",
+                f"the id of resources returned by M(vmware.vmware_rest.{module_name})",
+            ).rstrip()
+        return my_string
+
+
+def gen_documentation(
+    name: str,
+    description: str,
+    parameters: List,
+    added_ins: Dict,
+    next_version: str,
+    target_dir: str,
+) -> Dict:
+
+    short_description = description.split(". 
")[0] + documentation = { + "author": ["Ansible Cloud Team (@ansible-collections)"], + "description": description, + "module": name, + "notes": ["Tested on vSphere 7.0.2"], + "options": { + "vcenter_hostname": { + "description": [ + "The hostname or IP address of the vSphere vCenter", + "If the value is not specified in the task, the value of environment variable C(VMWARE_HOST) will be used instead.", + ], + "type": "str", + "required": True, + }, + "vcenter_username": { + "description": [ + "The vSphere vCenter username", + "If the value is not specified in the task, the value of environment variable C(VMWARE_USER) will be used instead.", + ], + "type": "str", + "required": True, + }, + "vcenter_password": { + "description": [ + "The vSphere vCenter password", + "If the value is not specified in the task, the value of environment variable C(VMWARE_PASSWORD) will be used instead.", + ], + "type": "str", + "required": True, + }, + "vcenter_validate_certs": { + "description": [ + "Allows connection when SSL certificates are not valid. Set to C(false) when certificates are not trusted.", + "If the value is not specified in the task, the value of environment variable C(VMWARE_VALIDATE_CERTS) will be used instead.", + ], + "type": "bool", + "default": True, + }, + "vcenter_rest_log_file": { + "description": [ + "You can use this optional parameter to set the location of a log file. ", + "This file will be used to record the HTTP REST interaction. ", + "The file will be stored on the host that run the module. ", + "If the value is not specified in the task, the value of ", + "environment variable C(VMWARE_REST_LOG_FILE) will be used instead.", + ], + "type": "str", + }, + "session_timeout": { + "description": [ + "Timeout settings for client session. ", + "The maximal number of seconds for the whole operation including connection establishment, request sending and response. ", + "The default value is 300s.", + ], + "type": "float", + "version_added": "2.1.0", + }, + }, + "requirements": ["vSphere 7.0.2 or greater", "python >= 3.6", "aiohttp"], + "short_description": short_description, + "version_added": next_version, + } + + # Note: this series of if block is overcomplicated and should + # be refactorized. + for parameter in parameters: + if parameter["name"] == "action": + continue + normalized_name = normalize_parameter_name(parameter["name"]) + description = [] + option = {} + if parameter.get("required"): + option["required"] = True + if parameter.get("aliases"): + option["aliases"] = parameter.get("aliases") + if parameter.get("description"): + description.append(parameter["description"]) + if parameter.get("subkeys"): + description.append("Valid attributes are:") + for sub_k, sub_v in parameter.get("subkeys").items(): + sub_v["type"] = python_type(sub_v["type"]) + states = sorted(set([ansible_state(o) for o in sub_v["_operationIds"]])) + required_with_operations = sorted( + set([ansible_state(o) for o in sub_v["_required_with_operations"]]) + ) + description.append( + " - C({name}) ({type}): {description} ({states})".format( + **sub_v, states=states + ) + ) + if required_with_operations: + description.append( + f" This key is required with {required_with_operations}." 
+ ) + if "enum" in sub_v: + description.append(" - Accepted values:") + for i in sorted(sub_v["enum"]): + description.append(f" - {i}") + if "properties" in sub_v: + description.append(" - Accepted keys:") + for i, v in sub_v["properties"].items(): + description.append( + f" - {i} ({v['type']}): {v['description']}" + ) + if v.get("enum"): + description.append("Accepted value for this field:") + for val in sorted(v.get("enum")): + description.append(f" - C({val})") + + option["description"] = list(Description.normalize(description)) + option["type"] = python_type(parameter["type"]) + if "enum" in parameter: + option["choices"] = sorted(parameter["enum"]) + if parameter["type"] == "array": + option["elements"] = python_type(parameter["elements"]) + if parameter.get("default"): + option["default"] = parameter.get("default") + + documentation["options"][normalized_name] = option + parameter["added_in"] = next_version + + module_from_config = get_module_from_config(name, target_dir) + if module_from_config and "documentation" in module_from_config: + for k, v in module_from_config["documentation"].items(): + documentation[k] = v + return documentation + + +def path_to_name(path: str) -> str: + _path = path.lstrip("/").split("?")[0] + elements = [] + keys = [] + for i in _path.split("/"): + if "{" in i: + keys.append(i) + elif len(keys) > 1: + # action for a submodule, we gather these end-points in the main module + continue + else: + elements.append(i) + + # workaround for vcenter_vm_power and appliance_services, appliance_shutdown, appliance_system_storage + if elements[-1] in ( + "stop", + "start", + "restart", + "suspend", + "reset", + "cancel", + "poweroff", + "reboot", + "resize", + ): + elements = elements[:-1] + if elements[0:3] == ["rest", "com", "vmware"]: + elements = elements[3:] + elif elements[0:2] == ["rest", "hvc"]: + elements = elements[1:] + elif elements[0:2] == ["rest", "appliance"]: + elements = elements[1:] + elif elements[0:2] == ["rest", "vcenter"]: + elements = elements[1:] + elif elements[0:2] == ["rest", "api"]: + elements = elements[2:] + elif elements[:1] == ["api"]: + elements = elements[1:] + + module_name = "_".join(elements) + return module_name.replace("-", "") + + +def gen_arguments_py(parameters: List, list_index=None) -> str: + result = "" + for parameter in parameters: + name = normalize_parameter_name(parameter["name"]) + values = [] + + if name in ["user_name", "username", "encryption_key", "client_token"]: + values.append("'no_log': True") + elif "password" in name: + values.append("'no_log': True") + + if parameter.get("required"): + values.append("'required': True") + + aliases = parameter.get("aliases") + if aliases: + values.append(f"'aliases': {aliases}") + + _type = python_type(parameter["type"]) + values.append(f"'type': '{_type}'") + if "enum" in parameter: + choices = ", ".join([f"'{i}'" for i in sorted(parameter["enum"])]) + values.append(f"'choices': [{choices}]") + if python_type(parameter["type"]) == "list": + _elements = python_type(parameter["elements"]) + values.append(f"'elements': '{_elements}'") + + # "bus" option defaulting on 0 + if name == "bus": + values.append("'default': 0") + elif "default" in parameter: + default = parameter["default"] + values.append(f"'default': '{default}'") + + result += f"\nargument_spec['{name}'] = " + result += "{" + ", ".join(values) + "}" + return result + + +def flatten_ref(tree: any, definitions: Iterable) -> any: + if isinstance(tree, str): + if tree.startswith("#/definitions/"): + raise 
Exception("TODO") + return definitions.get(tree) + if isinstance(tree, list): + return [flatten_ref(i, definitions) for i in tree] + if tree is None: + return {} + for k in tree: + v = tree[k] + if k == "$ref": + dotted = v.split("/")[2] + if dotted in ["vapi.std.localization_param", "VapiStdLocalizationParam"]: + # to avoid an endless loop with + # vapi.std.nested_localizable_message + return {"go_to": "vapi.std.localization_param"} + definition = definitions.get(dotted) + data = flatten_ref(definition, definitions) + if "description" not in data and "description" in tree: + data["description"] = tree["description"] + return data + elif isinstance(v, dict): + tree[k] = flatten_ref(v, definitions) + else: + pass + return tree + + +class Resource: + def __init__(self, name: str): + self.name = name + self.operations = {} + self.summary = {} + + +# amazon.cloud specific +def generate_params(definitions: Iterable) -> str: + params: str = "" + keys = sorted( + definitions.keys() - ["wait", "wait_timeout", "state", "purge_tags", "force"] + ) + for key in keys: + params += f"\nparams['{key}'] = module.params.get('{key}')" + + return params + + +def gen_mutually_exclusive(schema: Dict) -> List: + primary_idenfifier = schema.get("primaryIdentifier", []) + entries: List = [] + + if len(primary_idenfifier) > 1: + entries.append([tuple(primary_idenfifier), "identifier"]) + + return entries + + +def ensure_all_identifiers_defined(schema: Dict) -> str: + primary_idenfifier = schema.get("primaryIdentifier", []) + new_content: str = "if state in ('present', 'absent', 'get', 'describe') and module.params.get('identifier') is None:\n" + new_content += 8 * " " + new_content += f"if not module.params.get('{primary_idenfifier[0]}')" + " ".join( + map(lambda x: f" or not module.params.get('{x}')", primary_idenfifier[1:]) + ) + new_content += ":\n" + 12 * " " + new_content += ( + "module.fail_json(f'You must specify both {*identifier, } identifiers.')\n" + ) + + return new_content + + +def generate_argument_spec(options: Dict) -> str: + argument_spec: str = "" + options_copy = copy.deepcopy(options) + + for key in options_copy.keys(): + ignore_description(options_copy[key]) + + for key in options_copy.keys(): + argument_spec += f"\nargument_spec['{key}'] = " + argument_spec += str(options_copy[key]) + + argument_spec = argument_spec.replace("suboptions", "options") + + return argument_spec + + +# common procs +def gen_required_if(schema: Union[List, Dict]) -> List: + if isinstance(schema, dict): + primary_idenfifier = schema.get("primaryIdentifier", []) + required = schema.get("required", []) + entries: List = [] + states = ["absent", "get"] + + _primary_idenfifier = copy.copy(primary_idenfifier) + + # For compound primary identifiers consisting of multiple resource properties strung together, + # use the property values in the order that they are specified in the primary identifier definition + if len(primary_idenfifier) > 1: + entries.append(["state", "list", primary_idenfifier[:-1], True]) + _primary_idenfifier.append("identifier") + + entries.append( + [ + "state", + "present", + list(set([*_primary_idenfifier, *required])), + True, + ] + ) + [ + entries.append(["state", state, _primary_idenfifier, True]) + for state in states + ] + else: + by_states = DefaultDict(list) + for parameter in schema: + for operation in parameter.get("_required_with_operations", []): + by_states[ansible_state(operation)].append(parameter["name"]) + entries = [] + for operation, fields in by_states.items(): + state = 
ansible_state(operation) + if "state" in entries: + entries.append(["state", state, sorted(set(fields)), True]) + + return entries + + +# Classes +class AnsibleModuleBaseAmazon(UtilsBase): + template_file = "default_module.j2" + + def __init__(self, schema: Iterable): + self.schema = schema + self.name = self.generate_module_name() + + def generate_module_name(self) -> str: + splitted = self.schema.get("typeName").split("::") + prefix = splitted[1].lower() + list_to_str = "".join(map(str, splitted[2:])) + return prefix + "_" + camel_to_snake(list_to_str) + + def renderer(self, target_dir: str, next_version: str): + added_ins = get_module_added_ins(self.name, git_dir=target_dir / ".git") + documentation = generate_documentation( + self, + added_ins, + next_version, + target_dir, + ) + + arguments = generate_argument_spec(documentation["options"]) + documentation_to_string = format_documentation(documentation) + + content = jinja2_renderer( + self.template_file, + arguments=indent(arguments, 4), + documentation=documentation_to_string, + name=self.name, + resource_type=f"'{self.schema.get('typeName')}'", + params=indent(generate_params(documentation["options"]), 4), + primary_identifier=self.schema["primaryIdentifier"], + required_if=gen_required_if(self.schema), + mutually_exclusive=gen_mutually_exclusive(self.schema), + ensure_all_identifiers_defined=ensure_all_identifiers_defined(self.schema) + if len(self.schema["primaryIdentifier"]) > 1 + else "", + create_only_properties=self.schema.get("createOnlyProperties", {}), + handlers=list(self.schema.get("handlers", {}).keys()), + ) + + self.write_module(target_dir, content) + + +class AnsibleModuleBaseVmware(UtilsBase): + template_file = "default_module.j2" + + def __init__(self, resource: str, definitions: any): + self.resource = resource + self.definitions = definitions + self.name = resource.name + self.default_operationIds = set(list(self.resource.operations.keys())) - set( + ["get", "list"] + ) + + def description(self) -> str: + prefered_operationId = ["get", "list", "create", "get", "set"] + for operationId in prefered_operationId: + if operationId not in self.default_operationIds: + continue + if operationId in self.resource.summary: + return self.resource.summary[operationId].split("\n")[0] + + for operationId in sorted(self.default_operationIds): + if operationId in self.resource.summary: + return self.resource.summary[operationId].split("\n")[0] + + print(f"generic description: {self.name}") + return f"Handle resource of type {self.name}" + + def get_path(self) -> str: + return list(self.resource.operations.values())[0][1] + + def list_index(self) -> any: + for i in ["get", "update", "delete"]: + if i not in self.resource.operations: + continue + path = self.resource.operations[i][1] + break + else: + return + + m = re.search(r"{([-\w]+)}$", path) + if m: + return m.group(1) + + def payload(self) -> Dict: + """ "Return a structure that describe the format of the data to send back.""" + payload = {} + # for operationId in self.resource.operations: + for operationId in self.default_operationIds: + if operationId not in self.resource.operations: + continue + payload[operationId] = {"query": {}, "body": {}, "path": {}} + payload_info = {} + for parameter in AnsibleModuleBaseVmware._property_to_parameter( + self.resource.operations[operationId][2], self.definitions, operationId + ): + _in = parameter["in"] or "body" + + payload_info = parameter["_loc_in_payload"] + payload[operationId][_in][parameter["name"]] = payload_info + return 
payload + + def answer(self) -> any: + # This is arguably not super elegant. The list outputs just include a summary of the resources, + # with this little transformation, we get access to the full item + output_format = None + for i in ["list", "get"]: + if i in self.resource.operations: + output_format = self.resource.operations[i][3]["200"] + if not output_format: + return + + if "items" in output_format["schema"]: + ref = ( + output_format["schema"]["items"] + .get("$ref", "") + .replace("Summary", "Info") + ) + elif "schema" in output_format: + ref = output_format["schema"].get("$ref") + else: + ref = output_format.get("$ref") + + if not ref: + return + try: + raw_answer = flatten_ref({"$ref": ref}, self.definitions) + except KeyError: + return + if "properties" in raw_answer: + return raw_answer["properties"].keys() + + def parameters(self) -> Iterable: + def sort_operationsid(input: Iterable) -> Iterable: + output = sorted(input) + if "create" in output: + output = ["create"] + output + return output + + results = {} + for operationId in sort_operationsid(self.default_operationIds): + if operationId not in self.resource.operations: + continue + + for parameter in AnsibleModuleBaseVmware._property_to_parameter( + self.resource.operations[operationId][2], self.definitions, operationId + ): + name = parameter["name"] + if name not in results: + results[name] = parameter + results[name]["operationIds"] = [] + results[name]["_required_with_operations"] = [] + + # Merging two parameters, for instance "action" in + # /rest/vcenter/vm-template/library-items/{template_library_item}/check-outs + # and + # /rest/vcenter/vm-template/library-items/{template_library_item}/check-outs/{vm} + if "description" not in parameter: + pass + elif "description" not in results[name]: + results[name]["description"] = parameter.get("description") + elif results[name]["description"] != parameter.get("description"): + # We can hardly merge two description strings and + # get magically something meaningful + if len(parameter["description"]) > len( + results[name]["description"] + ): + results[name]["description"] = parameter["description"] + if "enum" in parameter: + results[name]["enum"] += parameter["enum"] + results[name]["enum"] = sorted(set(results[name]["enum"])) + + results[name]["operationIds"].append(operationId) + results[name]["operationIds"].sort() + if "subkeys" in parameter: + if "subkeys" not in results[name]: + results[name]["subkeys"] = {} + for sub_k, sub_v in parameter["subkeys"].items(): + if sub_k in results[name]["subkeys"]: + results[name]["subkeys"][sub_k][ + "_required_with_operations" + ] += sub_v["_required_with_operations"] + results[name]["subkeys"][sub_k]["_operationIds"] += sub_v[ + "_operationIds" + ] + results[name]["subkeys"][sub_k]["description"] = sub_v[ + "description" + ] + else: + results[name]["subkeys"][sub_k] = sub_v + + if parameter.get("required"): + results[name]["_required_with_operations"].append(operationId) + + answer_fields = self.answer() + # Note: If the final result comes with a "label" field, we expose a "label" + # parameter. We will use the field to identify an existing resource. 
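+        # (for instance, a VM disk comes back with a label like "Hard disk 1")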
+ if answer_fields and "label" in answer_fields: + results["label"] = { + "type": "str", + "name": "label", + "description": "The name of the item", + } + + for name, result in results.items(): + if result.get("enum"): + result["enum"] = sorted(set(result["enum"])) + if result.get("required"): + if ( + len(set(self.default_operationIds) - set(result["operationIds"])) + > 0 + ): + + required_with = [] + for i in result["operationIds"]: + state = ansible_state(i, self.default_operationIds) + if state: + required_with.append(state) + result["description"] += " Required with I(state={})".format( + sorted(set(required_with)) + ) + del result["required"] + else: + result["description"] += " This parameter is mandatory." + + states = [] + for operation in sorted(list(self.default_operationIds)): + if operation in ["create", "update"]: + states.append("present") + elif operation == "delete": + states.append("absent") + else: + states.append(operation) + + results["state"] = { + "name": "state", + "type": "str", + "enum": sorted(set(states)), + } + if "present" in states: + results["state"]["default"] = "present" + elif "set" in states: + results["state"]["default"] = "set" + elif states: + results["state"]["required"] = True + + # There is just one possible operation, we remove the "state" parameter + if len(self.resource.operations) == 1: + del results["state"] + + # Suppport pre 7.0.2 filters + if "list" in self.default_operationIds or "get" in self.default_operationIds: + for i in ["datacenters", "folders", "names"]: + if i in results and results[i]["type"] == "array": + results[i]["aliases"] = [f"filter_{i}"] + if "type" in results and results["type"]["type"] == "string": + results["type"]["aliases"] = ["filter_type"] + if "types" in results and results["types"]["type"] == "array": + results["types"]["aliases"] = ["filter_types"] + + return sorted(results.values(), key=lambda item: item["name"]) + + def gen_required_if(self, parameters: List) -> List: + by_states = DefaultDict(list) + for parameter in parameters: + for operation in parameter.get("_required_with_operations", []): + by_states[ansible_state(operation)].append(parameter["name"]) + entries = [] + for operation, fields in by_states.items(): + state = ansible_state(operation) + if "state" in entries: + entries.append(["state", state, sorted(set(fields)), True]) + return entries + + @staticmethod + def _property_to_parameter( + prop_struct: any, definitions: Iterable, operationId: any + ) -> Iterable: + properties = flatten_ref(prop_struct, definitions) + + def get_next(properties: List) -> Iterable: + required_keys = [] + for i, v in enumerate(properties): + required = v.get("required") + if "schema" in v: + if "properties" in v["schema"]: + properties[i] = v["schema"]["properties"] + if "required" in v["schema"]: + required_keys = v["schema"]["required"] + elif "additionalProperties" in v["schema"]: + properties[i] = v["schema"]["additionalProperties"][ + "properties" + ] + + for i, v in enumerate(properties): + # appliance_health_messages + if isinstance(v, str): + yield v, {}, [], [] + + elif "spec" in v and "properties" in v["spec"]: + required_keys = required_keys or [] + if "required" in v["spec"]: + required_keys = v["spec"]["required"] + for name, property in v["spec"]["properties"].items(): + yield name, property, ["spec"], name in required_keys + + elif isinstance(v, dict): + if not isinstance(v, dict): + continue + # {'type': 'string', 'required': True, 'in': 'path', 'name': 'datacenter', 'description': 'Identifier of the 
datacenter.'} + if "name" in v and "in" in v and v.get("in") in ["path", "query"]: + yield v["name"], v, [], v.get("required") + # elif "name" in v and isinstance(v["name", dict]): + # yield v["name"], v, [], v.get("required") + else: + for k, data in v.items(): + if isinstance(data, dict): + yield k, data, [], k in required_keys or data.get( + "required" + ) + + parameters = [] + + for name, v, parent, required in get_next(properties): + if name == "request_body": + raise ValueError() + parameter = { + "name": name, + "type": v.get("type", "str"), # 'str' by default, should be ok + "description": v.get("description", ""), + "required": required, + "_loc_in_payload": "/".join(parent + [name]), + "in": v.get("in"), + } + if "enum" in v: + parameter["enum"] = sorted(set(v["enum"])) + + sub_items = None + required_subkeys = v.get("required", []) + + if "properties" in v: + sub_items = v["properties"] + if "required" in v["properties"]: # NOTE: do we still need these + required_subkeys = v["properties"]["required"] + elif "items" in v and "properties" in v["items"]: + sub_items = v["items"]["properties"] + if "required" in v["items"]: # NOTE: do we still need these + required_subkeys = v["items"]["required"] + elif "items" in v and "name" not in v["items"]: + parameter["elements"] = v["items"].get("type", "str") + elif "items" in v and v["items"]["name"]: + sub_items = v["items"] + + if sub_items: + subkeys = {} + for sub_k, sub_v in sub_items.items(): + subkey = { + "name": sub_k, + "type": sub_v["type"], + "description": sub_v.get("description", ""), + "_required_with_operations": [operationId] + if sub_k in required_subkeys + else [], + "_operationIds": [operationId], + } + if "enum" in sub_v: + subkey["enum"] = sub_v["enum"] + if "properties" in sub_v: + subkey["properties"] = sub_v["properties"] + subkeys[sub_k] = subkey + parameter["subkeys"] = subkeys + parameter["elements"] = "dict" + parameters.append(parameter) + + return sorted( + parameters, key=lambda item: (item["name"], item.get("description")) + ) + + def list_path(self) -> any: + list_path = None + if "list" in self.resource.operations: + list_path = self.resource.operations["list"][1] + + return list_path + + def renderer(self, target_dir: str, next_version: str): + + added_ins = {} # get_module_added_ins(self.name, git_dir=target_dir / ".git") + arguments = gen_arguments_py(self.parameters(), self.list_index()) + documentation = format_documentation( + gen_documentation( + self.name, + self.description(), + self.parameters(), + added_ins, + next_version, + target_dir, + ) + ) + required_if = gen_required_if(self.parameters()) + + content = jinja2_renderer( + self.template_file, + arguments=indent(arguments, 4), + documentation=documentation, + list_index=self.list_index(), + list_path=self.list_path(), + name=self.name, + operations=self.resource.operations, + path=self.get_path(), + payload_format=self.payload(), + required_if=required_if, + ) + + self.write_module(target_dir, content) + + +class AnsibleInfoModule(AnsibleModuleBaseVmware): + def __init__(self, resource: any, definitions: any): + super().__init__(resource, definitions) + self.name = resource.name + "_info" + self.default_operationIds = ["get", "list"] + + def parameters(self) -> List: + return [i for i in list(super().parameters()) if i["name"] != "state"] + + +class AnsibleInfoNoListModule(AnsibleInfoModule): + template_file = "info_no_list_module.j2" + + +class AnsibleInfoListOnlyModule(AnsibleInfoModule): + template_file = "info_list_and_get_module.j2" + 
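The `Definitions` class that follows resolves `$ref` pointers from the Swagger payload, with a `com.vmware.`-prefixed fallback lookup. A minimal sketch of that behavior, using a toy definitions dict (the key names here are invented for the example):

```python
defs = Definitions({"com.vmware.vcenter.VM": {"type": "object"}})

# A dict-style ref is unwrapped to "vcenter.VM"; the bare key misses,
# then the "com.vmware." fallback hits.
assert defs.get({"$ref": "#/definitions/vcenter.VM"}) == {"type": "object"}
assert defs.get("vcenter.VM") == {"type": "object"}
```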
+
+class Definitions:
+    def __init__(self, data: any):
+        super().__init__()
+        self.definitions = data
+
+    def get(self, ref: any) -> any:
+        if isinstance(ref, dict):
+            # TODO: standardize the input to avoid this step
+            dotted = ref["$ref"].split("/")[2]
+        else:
+            dotted = ref
+
+        try:
+            definition = self.definitions[dotted]
+        except KeyError:
+            definition = self.definitions["com.vmware." + dotted]
+
+        if definition is None:
+            raise Exception(f"Cannot find ref for {ref}")
+
+        return definition
+
+
+class Path:
+    def __init__(self, path: str, value: any):
+        super().__init__()
+        self.path = path
+        self.operations = {}
+        self.verb = {}
+        self.value = value
+
+    def summary(self, verb: str) -> str:
+        return self.value[verb]["summary"]
+
+    def is_tech_preview(self) -> bool:
+        for verb in self.value.keys():
+            if "Technology Preview" in self.summary(verb):
+                return True
+        return False
+
+
+class SwaggerFile:
+    def __init__(self, raw_content: any):
+        super().__init__()
+        self.resources = {}
+        json_content = json.loads(raw_content)
+        self.definitions = Definitions(json_content["definitions"])
+        self.paths = self.load_paths(json_content["paths"])
+
+    @staticmethod
+    def load_paths(paths: Dict) -> Dict:
+        result = {}
+
+        for path in [Path(p, v) for p, v in paths.items()]:
+            if path.is_tech_preview():
+                continue
+            result[path.path] = path
+            for verb, desc in path.value.items():
+                operationId = desc["operationId"]
+                if desc.get("deprecated"):
+                    continue
+                try:
+                    parameters = desc["parameters"]
+                except KeyError:
+                    print(f"No parameters for {operationId} {path.path}")
+                if path.path.startswith("/rest/vcenter/vm/{vm}/tools"):
+                    if operationId == "upgrade":
+                        print(f"Skipping {path.path} upgrade (broken)")
+                        continue
+                if path.path == "/api/appliance/infraprofile/configs":
+                    if operationId == "validate$task":
+                        print(f"Skipping {path.path} validate$task (broken)")
+                        continue
+                path.operations[operationId] = (
+                    verb,
+                    path.path,
+                    parameters,
+                    desc["responses"],
+                )
+        return result
+
+    @staticmethod
+    def init_resources(paths: Iterable) -> Dict:
+        resources = {}
+        for path in paths:
+            if "vmw-task=true" in path.path:
+                continue
+
+            name = path_to_name(path.path)
+            if name == "esx_settings_clusters_software_drafts":
+                continue
+            if name not in resources:
+                resources[name] = Resource(name)
+
+            for operationId, v in path.operations.items():
+                verb = v[0]
+                resources[name].summary[operationId] = path.summary(verb)
+                if operationId in resources[name].operations:
+                    print(
+                        f"Cannot create operationId ({operationId}) with path "
+                        f"({verb}) {path.path}. 
already defined: " + f"{resources[name].operations[operationId]}" + ) + continue + operationId = operationId.replace( + "$task", "" + ) # NOTE: Not sure if this is the right thing to do + resources[name].operations[operationId] = v + return resources + + +# module_generation procs + + +def generate_amazon_cloud(args: Iterable): + module_list = [] + + for type_name in RESOURCES: + file_name = re.sub("::", "_", type_name) + print(f"Generating modules {file_name}") + schema_file = args.schema_dir / f"{file_name}.json" + schema = json.loads(schema_file.read_text()) + + module = AnsibleModuleBaseAmazon(schema=schema) + + if module.is_trusted(args.target_dir): + module.renderer(target_dir=args.target_dir, next_version=args.next_version) + module_list.append(module.name) + + modules = [f"plugins/modules/{module}.py" for module in module_list] + module_utils = ["plugins/module_utils/core.py", "plugins/module_utils/utils.py"] + + ignore_dir = args.target_dir / "tests" / "sanity" + ignore_dir.mkdir(parents=True, exist_ok=True) + + for version in ["2.9", "2.10", "2.11", "2.12", "2.13", "2.14"]: + per_version_ignore_content = "" + skip_list = [] + + if version in ["2.9", "2.10", "2.11"]: + skip_list += [ + "compile-2.7!skip", # Py3.6+ + "compile-3.5!skip", # Py3.6+ + "import-2.7!skip", # Py3.6+ + "import-3.5!skip", # Py3.6+ + "future-import-boilerplate!skip", # Py2 only + "metaclass-boilerplate!skip", # Py2 only + "compile-2.6!skip", # Py3.6+ + "import-2.6!skip", # Py3.6+ + ] + validate_skip_needed = [ + "plugins/modules/s3_bucket.py", + "plugins/modules/backup_backup_vault.py", + "plugins/modules/backup_framework.py", + "plugins/modules/backup_report_plan.py", + "plugins/modules/lambda_function.py", + "plugins/modules/rdsdb_proxy.py", + "plugins/modules/redshift_cluster.py", + "plugins/modules/eks_cluster.py", + "plugins/modules/dynamodb_global_table.py", + "plugins/modules/kms_replica_key.py", + "plugins/modules/rds_db_proxy.py", + "plugins/modules/iam_server_certificate.py", + "plugins/modules/cloudtrail_trail.py", + "plugins/modules/route53_key_signing_key.py", + "plugins/modules/redshift_endpoint_authorization.py", + "plugins/modules/eks_fargate_profile.py", + ] + mutually_exclusive_skip = [ + "plugins/modules/eks_addon.py", + "plugins/modules/eks_fargate_profile.py", + "plugins/modules/redshift_endpoint_authorization.py", + "plugins/modules/route53_key_signing_key.py", + ] + + for f in module_utils: + for skip in skip_list: + per_version_ignore_content += f"{f} {skip}\n" + + for f in modules: + for skip in skip_list: + per_version_ignore_content += f"{f} {skip}\n" + + if f in validate_skip_needed: + if version in ["2.10", "2.11", "2.12", "2.13", "2.14"]: + if ( + f == "plugins/modules/redshift_endpoint_authorization.py" + and version in ("2.11", "2.12", "2.13", "2.14") + ): + pass + else: + validate_skip_list = [ + "validate-modules:no-log-needed", + ] + for skip in validate_skip_list: + per_version_ignore_content += f"{f} {skip}\n" + + if version in ["2.10", "2.11", "2.12", "2.13", "2.14"]: + per_version_ignore_content += ( + f"{f} validate-modules:parameter-state-invalid-choice\n" + ) + + for f in mutually_exclusive_skip: + per_version_ignore_content += ( + f"{f} validate-modules:mutually_exclusive-type\n" + ) + + ignore_file = ignore_dir / f"ignore-{version}.txt" + ignore_file.write_text(per_version_ignore_content) + + meta_dir = args.target_dir / "meta" + meta_dir.mkdir(parents=True, exist_ok=True) + yaml_dict = { + "requires_ansible": """>=2.11.0""", + "action_groups": {"aws": []}, + 
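+        # the module redirects for renamed modules are merged in below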
"plugin_routing": {"modules": {}}, + } + for m in module_list: + yaml_dict["action_groups"]["aws"].append(m) + + yaml_dict["plugin_routing"]["modules"].update( + { + "rdsdb_proxy": {"redirect": "amazon.cloud.rds_db_proxy"}, + "s3_object_lambda_access_point": { + "redirect": "amazon.cloud.s3objectlambda_access_point" + }, + "s3_object_lambda_access_point_policy": { + "redirect": "amazon.cloud.s3objectlambda_access_point_policy" + }, + } + ) + yaml_dict["action_groups"]["aws"].extend( + [ + "rdsdb_proxy", + "s3_object_lambda_access_point", + "s3_object_lambda_access_point_policy", + ] + ) + + runtime_file = meta_dir / "runtime.yml" + with open(runtime_file, "w") as file: + yaml.safe_dump(yaml_dict, file, sort_keys=False) + + collection_dir = pkg_resources.resource_filename("gouttelette", "data") + print(f"Copying files from {collection_dir}") + shutil.copytree(collection_dir, args.target_dir, dirs_exist_ok=True) + + return + + +def generate_vmware_rest(args: Iterable): + module_list = [] + for json_file in ["vcenter.json", "content.json", "appliance.json"]: + print("Generating modules from {}".format(json_file)) + api_spec_file = args.target_dir / "api_specifications" / "7.0.2" / json_file + raw_content = api_spec_file.read_text() + swagger_file = SwaggerFile(raw_content) + resources = swagger_file.init_resources(swagger_file.paths.values()) + + for resource in resources.values(): + if resource.name == "appliance_logging_forwarding": + continue + if resource.name.startswith("vcenter_trustedinfrastructure"): + continue + if "list" in resource.operations: + module = AnsibleInfoListOnlyModule( + resource, definitions=swagger_file.definitions + ) + if ( + module.is_trusted(args.target_dir) + and len(module.default_operationIds) > 0 + ): + module.renderer( + target_dir=args.target_dir, next_version=args.next_version + ) + module_list.append(module.name) + elif "get" in resource.operations: + module = AnsibleInfoNoListModule( + resource, definitions=swagger_file.definitions + ) + if ( + module.is_trusted(args.target_dir) + and len(module.default_operationIds) > 0 + ): + module.renderer( + target_dir=args.target_dir, next_version=args.next_version + ) + module_list.append(module.name) + + module = AnsibleModuleBaseVmware( + resource, definitions=swagger_file.definitions + ) + + if ( + module.is_trusted(args.target_dir) + and len(module.default_operationIds) > 0 + ): + module.renderer( + target_dir=args.target_dir, next_version=args.next_version + ) + module_list.append(module.name) + module_utils_dir = args.target_dir / "plugins" / "module_utils" + module_utils_dir.mkdir(exist_ok=True) + vmware_rest_dest = module_utils_dir / "vmware_rest.py" + vmware_rest_dest.write_bytes( + pkg_resources.resource_string("gouttelette", "module_utils/vmware_rest.py") + ) + return + + +def main(): + generator = get_generator() + if not generator: + raise Exception("gouttelette.yaml is missing generator value") + + generator_coll = re.sub("(.*)_code_generator", r"\1", generator["name"]) + parser = argparse.ArgumentParser( + description=f"Build the {generator['name']} modules." 
+ ) + + parser.add_argument( + "--target-dir", + dest="target_dir", + type=pathlib.Path, + default=pathlib.Path(generator["default_path"]), + help=f"location of the target repository (default: {generator['default_path']})", + ) + parser.add_argument( + "--next-version", + type=str, + default="TODO", + help="the next major version", + ) + if generator.get("name") == "amazon_cloud_code_generator": + parser.add_argument( + "--schema-dir", + type=pathlib.Path, + default=pathlib.Path("gouttelette/api_specifications/"), + help="location where to store the collected schemas (default: ./gouttelette/api_specifications/amazon_cloud)", + ) + args = parser.parse_args() + func = "generate_" + generator_coll + "(args)" + eval(func) + + info = VersionInfo(generator["name"]) + dev_md = args.target_dir / "dev.md" + dev_md.write_text( + ( + "The modules are autogenerated by:\n" + "https://github.com/ansible-collections/gouttelette\n" + "" + f"version: {info.version_string()}\n" + ) + ) + dev_md = args.target_dir / "commit_message" + dev_md.write_text( + ( + "bump auto-generated modules\n" + "\n" + "The modules are autogenerated by:\n" + "https://github.com/ansible-collections/gouttelette\n" + "" + f"version: {info.version_string()}\n" + ) + ) + + +if __name__ == "__main__": + main() diff --git a/gouttelette/cmd/refresh_schema.py b/gouttelette/cmd/refresh_schema.py new file mode 100644 index 0000000..9c0ee31 --- /dev/null +++ b/gouttelette/cmd/refresh_schema.py @@ -0,0 +1,72 @@ +import argparse +import pathlib +import re +from typing import Dict, Iterable, List, Optional, TypedDict +import boto3 +from .resources import RESOURCES +from .generator import CloudFormationWrapper +import json +from gouttelette.utils import camel_to_snake + + +class Schema(TypedDict): + """A type for the JSONSchema spec""" + + typeName: str + description: str + properties: Dict + definitions: Optional[Dict] + required: Optional[List] + primaryIdentifier: List + readOnlyProperties: Optional[List] + createOnlyProperties: Optional[List] + taggable: Optional[bool] + handlers: Optional[Dict] + + +def generate_schema(raw_content) -> Dict: + json_content = json.loads(raw_content) + schema: Dict[str, Schema] = json_content + + for key, value in schema.items(): + if key != "anyOf": + if isinstance(value, list): + elems = [] + for v in value: + if isinstance(v, list): + elems.extend( + [camel_to_snake(p.split("/")[-1].strip()) for p in v] + ) + else: + elems.append(camel_to_snake(v.split("/")[-1].strip())) + + schema[key] = elems + + return schema + + +def main() -> None: + parser = argparse.ArgumentParser(description="Collect the schema definition.") + parser.add_argument( + "--schema-dir", + type=pathlib.Path, + default=pathlib.Path("amazon_cloud_code_generator/api_specifications"), + help="location where to store the collected schemas (default: ./amazon_cloud_code_generator/api_specifications)", + ) + args = parser.parse_args() + + for type_name in RESOURCES: + print("Collecting Schema") + print(type_name) + cloudformation = CloudFormationWrapper(boto3.client("cloudformation")) + raw_content = cloudformation.generate_docs(type_name) + schema = generate_schema(raw_content) + file_name = re.sub("::", "_", type_name) + if not args.schema_dir.exists(): + pathlib.Path(args.schema_dir).mkdir(parents=True, exist_ok=True) + schema_file = args.schema_dir / f"{file_name}.json" + schema_file.write_text(json.dumps(schema, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/gouttelette/cmd/resources.py b/gouttelette/cmd/resources.py new 
file mode 100644 index 0000000..526d4eb --- /dev/null +++ b/gouttelette/cmd/resources.py @@ -0,0 +1,45 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# Copyright: (c) 2022, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + + +RESOURCES = [ + "AWS::Backup::BackupVault", + "AWS::Backup::Framework", + "AWS::Backup::ReportPlan", + "AWS::EKS::Cluster", + "AWS::IAM::Role", + "AWS::Lambda::CodeSigningConfig", + "AWS::Lambda::EventSourceMapping", + "AWS::Lambda::Function", + "AWS::Logs::LogGroup", + "AWS::Logs::QueryDefinition", + "AWS::Logs::ResourcePolicy", + "AWS::RDS::DBProxy", + "AWS::Redshift::Cluster", + "AWS::Redshift::EventSubscription", + "AWS::S3::AccessPoint", + "AWS::S3::Bucket", + "AWS::S3::MultiRegionAccessPoint", + "AWS::S3::MultiRegionAccessPointPolicy", + "AWS::S3ObjectLambda::AccessPoint", + "AWS::S3ObjectLambda::AccessPointPolicy", + # 0.2.0 + "AWS::EKS::FargateProfile", + "AWS::DynamoDB::GlobalTable", + "AWS::EKS::Addon", + "AWS::IAM::ServerCertificate", + "AWS::KMS::Alias", + "AWS::KMS::ReplicaKey", + "AWS::RDS::DBProxyEndpoint", + "AWS::Redshift::EndpointAccess", + "AWS::Redshift::EndpointAuthorization", + "AWS::Redshift::ScheduledAction", + "AWS::Route53::DNSSEC", + "AWS::Route53::KeySigningKey", + "AWS::CloudTrail::Trail", + "AWS::CloudTrail::EventDataStore", + "AWS::CloudWatch::CompositeAlarm", + "AWS::CloudWatch::MetricStream", +] diff --git a/gouttelette/data/changelogs/fragments/drop-old-ansible.yml b/gouttelette/data/changelogs/fragments/drop-old-ansible.yml new file mode 100644 index 0000000..5a2668f --- /dev/null +++ b/gouttelette/data/changelogs/fragments/drop-old-ansible.yml @@ -0,0 +1,2 @@ +breaking_changes: +- amazon.cloud collection - Support for ansible-core < 2.11 has been dropped (https://github.com/ansible-collections/amazon.cloud/pull/27). diff --git a/gouttelette/module_utils/test_vmware_rest.py b/gouttelette/module_utils/test_vmware_rest.py new file mode 100644 index 0000000..12a9c4b --- /dev/null +++ b/gouttelette/module_utils/test_vmware_rest.py @@ -0,0 +1,15 @@ +from vmware_rest import get_subdevice_type, gen_args + + +def test_get_subdevice_type(): + assert get_subdevice_type("http://a/{b}/b/{c}/d") == "c" + assert get_subdevice_type("http://a/{b}/b/{c}/d/{e}") is None + assert get_subdevice_type("http://a/{b}/b") is None + + +def test_gen_args(): + assert gen_args({"a": [1, 2, 3]}, []) == "" + assert gen_args({"a": [1, 2, 3]}, ["a"]) == "?a=1&a=2&a=3" + assert gen_args({"b a f": "b c a"}, ["b a f"]) == "?b%20a%20f=b%20c%20a" + assert gen_args({"b": False}, ["b"]) == "" + assert gen_args({"b": None}, ["b"]) == "" diff --git a/gouttelette/module_utils/vmware_rest.py b/gouttelette/module_utils/vmware_rest.py new file mode 100644 index 0000000..ec48636 --- /dev/null +++ b/gouttelette/module_utils/vmware_rest.py @@ -0,0 +1,416 @@ +# This file is maintained in the vmware_rest_code_generator project +# https://github.com/ansible-collections/vmware_rest_code_generator +# Copyright (c) 2021 Ansible Project +# +# This code is part of Ansible, but is an independent component. +# This particular file snippet, and this file snippet only, is BSD licensed. +# Modules you write using this snippet, which is embedded dynamically by Ansible +# still belong to the author of the module, and may assign their own license +# to the complete work. 
+# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# + +import hashlib +import importlib +import json +import re + +import urllib.parse + +from ansible.module_utils.basic import missing_required_lib +from ansible.module_utils.parsing.convert_bool import boolean + + +async def open_session( + vcenter_hostname=None, + vcenter_username=None, + vcenter_password=None, + validate_certs=True, + log_file=None, +): + validate_certs = boolean(validate_certs) + m = hashlib.sha256() + m.update(vcenter_hostname.encode()) + m.update(vcenter_username.encode()) + m.update(vcenter_password.encode()) + if log_file: + m.update(log_file.encode()) + m.update(b"yes" if validate_certs else b"no") + digest = m.hexdigest() + # TODO: Handle session timeout + if digest in open_session._pool: + return open_session._pool[digest] + + exceptions = importlib.import_module( + "ansible_collections.cloud.common.plugins.module_utils.turbo.exceptions" + ) + try: + aiohttp = importlib.import_module("aiohttp") + except ImportError: + raise exceptions.EmbeddedModuleFailure(msg=missing_required_lib("aiohttp")) + + if not aiohttp: + raise exceptions.EmbeddedModuleFailure(msg="Failed to import aiohttp") + + if log_file: + trace_config = aiohttp.TraceConfig() + + async def on_request_end(session, trace_config_ctx, params): + with open(log_file, "a+", encoding="utf-8") as fd: + answer = await params.response.text() + fd.write( + f"{params.method}: {params.url}\n" + f"headers: {params.headers}\n" + f" status: {params.response.status}\n" + f" answer: {answer}\n\n" + ) + + trace_config.on_request_end.append(on_request_end) + trace_configs = [trace_config] + else: + trace_configs = [] + + auth = aiohttp.BasicAuth(vcenter_username, vcenter_password) + if validate_certs: + connector = aiohttp.TCPConnector(limit=20) + else: + connector = aiohttp.TCPConnector(limit=20, ssl=False) + async with aiohttp.ClientSession( + connector=connector, connector_owner=False, trace_configs=trace_configs + ) as session: + try: + async with session.post( + "https://{hostname}/rest/com/vmware/cis/session".format( + hostname=vcenter_hostname + ), + auth=auth, + ) as resp: + if resp.status != 200: + raise exceptions.EmbeddedModuleFailure( + "Authentication failure. 
code: {0}, json: {1}".format( + resp.status, await resp.text() + ) + ) + json = await resp.json() + except aiohttp.client_exceptions.ClientConnectorError as e: + raise exceptions.EmbeddedModuleFailure(f"Authentication failure: {e}") + + session_id = json["value"] + session = aiohttp.ClientSession( + connector=connector, + headers={ + "vmware-api-session-id": session_id, + "content-type": "application/json", + }, + connector_owner=False, + trace_configs=trace_configs, + ) + open_session._pool[digest] = session + return session + + +open_session._pool = {} + + +def gen_args(params, in_query_parameter): + elements = [] + for i in in_query_parameter: + if i.startswith("filter."): # < 7.0.2 + v = params.get("filter_" + i[7:]) + else: + v = params.get(i) + if not v: + continue + if isinstance(v, list): + for j in v: + elements += [(i, j)] + elif isinstance(v, bool) and v: + elements += [(i, str(v).lower())] + else: + elements += [(i, str(v))] + if not elements: + return "" + return "?" + urllib.parse.urlencode(elements, quote_via=urllib.parse.quote) + + +def session_timeout(params): + exceptions = importlib.import_module( + "ansible_collections.cloud.common.plugins.module_utils.turbo.exceptions" + ) + try: + aiohttp = importlib.import_module("aiohttp") + except ImportError: + raise exceptions.EmbeddedModuleFailure(msg=missing_required_lib("aiohttp")) + + if not aiohttp: + raise exceptions.EmbeddedModuleFailure(msg="Failed to import aiohttp") + out = {} + if params.get("session_timeout"): + out["timeout"] = aiohttp.ClientTimeout(total=params.get("session_timeout")) + return out + + +async def update_changed_flag(data, status, operation): + if data is None: + data = {"value": {}} + elif isinstance(data, list): # e.g: appliance_infraprofile_configs_info + data = {"value": data} + elif isinstance(data, str): + data = {"value": data} + elif isinstance(data, dict) and "value" not in data: # 7.0.2+ + data = {"value": data} + elif isinstance(data, bool): + data = {"value": data} + + if isinstance(data["value"], str) and data["value"][0] in [ + "{", + "]", + ]: # e.g: appliance_infraprofile_configs + data["value"] == json.loads(data["value"]) + + if status == 500: + data["failed"] = True + data["changed"] = False + elif operation in ["create", "clone", "instant_clone"] and status in [200, 201]: + data["failed"] = False + data["changed"] = True + elif operation == "update" and status in [200, 204]: + data["failed"] = False + data["changed"] = True + elif operation == "upgrade" and status in [200]: + data["failed"] = False + data["changed"] = True + elif operation == "set" and status in [200, 204]: + data["failed"] = False + data["changed"] = True + elif operation == "delete" and status in [200, 204]: + data["failed"] = False + data["changed"] = True + elif operation == "delete" and status == 404: + data["failed"] = False + data["changed"] = False + elif operation in ["get", "list"] and status in [200]: + data["failed"] = False + data["changed"] = False + elif operation in ["get", "list"] and status in [404]: + data["failed"] = True + data["changed"] = False + + elif status >= 400: + data["failed"] = True + data["changed"] = False + + if not isinstance(data["value"], dict): + pass + elif data.get("type") == "com.vmware.vapi.std.errors.not_found": + if operation == "delete": + data["failed"] = False + data["changed"] = False + else: + data["failed"] = True + data["changed"] = False + elif data.get("type") == "com.vmware.vapi.std.errors.already_in_desired_state": + data["failed"] = False + data["changed"] = 
False + elif data.get("type") == "com.vmware.vapi.std.errors.already_exists": + data["failed"] = False + data["changed"] = False + elif ( + data.get("value", {}).get("error_type") in ["NOT_FOUND"] + and operation == "delete" + ): + data["failed"] = False + data["changed"] = False + elif data.get("value", {}).get("error_type") in [ + "ALREADY_EXISTS", + "ALREADY_IN_DESIRED_STATE", + ]: + data["failed"] = False + data["changed"] = False + elif data.get("type") == "com.vmware.vapi.std.errors.resource_in_use": + # NOTE: this is a shortcut/hack. We get this issue if a CDROM already exists + data["failed"] = False + data["changed"] = False + elif ( + data.get("type") == "com.vmware.vapi.std.errors.internal_server_error" + and data["value"] + and data["value"]["messages"] + and data["value"]["messages"][0]["args"] + == [ + "com.vmware.vim.binding.vim.fault.DuplicateName cannot be cast to com.vmware.vim.binding.vim.fault.AlreadyConnected" + ] + ): + # NOTE: another one for vcenter_host + data["failed"] = False + data["changed"] = False + elif data.get("type", "").startswith("com.vmware.vapi.std.errors"): + data["failed"] = True + # 7.0.3, vcenter_ovf_libraryitem returns status 200 on failure + elif data.get("value", {}).get("error", {}).get("errors", []): + data["failed"] = True + + return data + + +async def list_devices(session, url): + existing_entries = [] + + async with session.get(url) as resp: + _json = await resp.json() + return _json + + +async def build_full_device_list(session, url, device_list): + import asyncio + + device_ids = [] + + if isinstance(device_list, list): + value = device_list + else: # 7.0.2 < + value = device_list["value"] + for i in value: + # Content library returns string {"value": "library_id"} + if isinstance(i, str): + device_ids.append(i) + continue + fields = list(i.values()) + if len(fields) != 1: + # The list already comes with all the details + return device_list + device_ids.append(fields[0]) + + tasks = [ + asyncio.ensure_future(get_device_info(session, url, _id)) for _id in device_ids + ] + + return [await i for i in tasks] + + +async def get_device_info(session, url, _id): + # remove the action=foo from the URL + m = re.search("(.+)(action=[-a-z]+)(.*)", url) + if m: + url = f"{m.group(1)}{m.group(3)}" + url = url.rstrip("?") + + # workaround for content_library_item_info + if "item?library_id=" in url: + item_url = url.split("?")[0] + "/" + _id + else: + item_url = url + "/" + _id + + async with session.get(item_url) as resp: + if resp.status == 200: + _json = await resp.json() + if "value" not in _json: # 7.0.2+ + _json = {"value": _json} + _json["id"] = str(_id) + return _json + + +async def exists( + params, session, url, uniquity_keys=None, per_id_url=None, comp_func=None +): + if not uniquity_keys: + uniquity_keys = [] + if not per_id_url: + per_id_url = url + + def default_comp_func(device): + for k in uniquity_keys: + if not params.get(k): + continue + if isinstance(device, dict): # 7.0.2 < + v = device["value"].get(k) + elif isinstance(device, list): + v = device + else: + exceptions = importlib.import_module( + "ansible_collections.cloud.common.plugins.module_utils.turbo.exceptions" + ) + raise exceptions.EmbeddedModuleFailure(msg="Unexpect type") + + if isinstance(k, int) or isinstance(v, str): + k = str(k) + v = str(v) + if v == params.get(k): + return device + + if not comp_func: + comp_func = default_comp_func + + uniquity_keys += ["label", "pci_slot_number", "sata"] + + devices = await list_devices(session, url) + full_devices = await 
build_full_device_list(session, per_id_url, devices) + + for device in full_devices: + if comp_func(device): + return device + + +def set_subkey(root, path, value): + cur_loc = root + splitted = path.split("/") + for j in splitted[:-1]: + if j not in cur_loc: + cur_loc[j] = {} + cur_loc = cur_loc[j] + cur_loc[splitted[-1]] = value + + +def prepare_payload(params, payload_format): + payload = {} + for i in payload_format["body"].keys(): + if params[i] is None: + continue + + path = payload_format["body"][i] + set_subkey(payload, path, params[i]) + return payload + + +def get_subdevice_type(url): + """If url needs a subkey, return its name.""" + candidates = [] + for i in url.split("/"): + if i.startswith("{"): + candidates.append(i[1:-1]) + if len(candidates) != 2: + return + return candidates[-1].split("}")[0] + + +def get_device_type(url): + device_type = url.split("/")[-1] + # NOTE: This mapping can be extracted from the delete end-point of the + # resource, e.g: + # /rest/vcenter/vm/{vm}/hardware/ethernet/{nic} -> nic + # Also, it sounds like we can use "list_index" instead + if device_type == "ethernet": + return "nic" + elif device_type in ["sata", "scsi"]: + return "adapter" + elif device_type in ["parallel", "serial"]: + return "port" + else: + return device_type diff --git a/gouttelette/templates/amazon_cloud/default_module.j2 b/gouttelette/templates/amazon_cloud/default_module.j2 new file mode 100644 index 0000000..016028c --- /dev/null +++ b/gouttelette/templates/amazon_cloud/default_module.j2 @@ -0,0 +1,74 @@ +{% include 'templates/amazon_cloud/header.j2' %} + +import json + +from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule +from ansible_collections.amazon.cloud.plugins.module_utils.core import CloudControlResource +from ansible_collections.amazon.cloud.plugins.module_utils.core import snake_dict_to_camel_dict +from ansible_collections.amazon.cloud.plugins.module_utils.core import ansible_dict_to_boto3_tag_list + + +def main(): + + argument_spec = dict( + state=dict(type='str', choices=['present', 'absent', 'list', 'describe', 'get'], default='present'), + ) + {{arguments}} + + required_if = [ + {{ required_if|join(",") }} + ] + mutually_exclusive = [ + {{ mutually_exclusive|join(",") }} + ] + + module = AnsibleAWSModule(argument_spec=argument_spec, required_if=required_if, mutually_exclusive=mutually_exclusive, supports_check_mode=True) + cloud = CloudControlResource(module) + + type_name = {{resource_type}} + + params = {} + {{params}} + # The DesiredState we pass to AWS must be a JSONArray of non-null values + _params_to_set = {k: v for k, v in params.items() if v is not None} + + # Only if resource is taggable + if module.params.get("tags") is not None: + _params_to_set["tags"] = ansible_dict_to_boto3_tag_list( + module.params["tags"] + ) + + params_to_set = snake_dict_to_camel_dict(_params_to_set, capitalize_first=True) + + # Ignore createOnlyProperties that can be set only during resource creation + create_only_params = {{create_only_properties}} + + # Necessary to handle when module does not support all the states + handlers = {{handlers}} + + state = module.params.get('state') + identifier = {{primary_identifier}} + {{ ensure_all_identifiers_defined }} + results = {"changed": False, "result": {}} + + if state == "list": + if "list" not in handlers: + module.exit_json(**results, msg=f"Resource type {type_name} cannot be listed.") + results["result"] = cloud.list_resources(type_name, identifier) + + if state in ("describe", "get"): + if 
"read" not in handlers: + module.exit_json(**results, msg=f"Resource type {type_name} cannot be read.") + results["result"] = cloud.get_resource(type_name, identifier) + + if state == "present": + results = cloud.present(type_name, identifier, params_to_set, create_only_params) + + if state == "absent": + results["changed"] |= cloud.absent(type_name, identifier) + + module.exit_json(**results) + + +if __name__ == '__main__': + main() diff --git a/gouttelette/templates/amazon_cloud/header.j2 b/gouttelette/templates/amazon_cloud/header.j2 new file mode 100644 index 0000000..fd00cdc --- /dev/null +++ b/gouttelette/templates/amazon_cloud/header.j2 @@ -0,0 +1,30 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# Copyright: (c) 2022, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +# template: header.j2 +# This module is autogenerated by amazon_cloud_code_generator. +# See: https://github.com/ansible-collections/amazon_cloud_code_generator + + +DOCUMENTATION = {{documentation}} + +EXAMPLES = r''' +''' + +RETURN = r''' +result: + description: + - When I(state=list), it is a list containing dictionaries of resource information. + - Otherwise, it is a dictionary of resource information. + - When I(state=absent), it is an empty dictionary. + returned: always + type: complex + contains: + identifier: + description: The unique identifier of the resource. + type: str + properties: + description: The resource properties. + type: dict +''' diff --git a/gouttelette/templates/vmware_rest/default_module.j2 b/gouttelette/templates/vmware_rest/default_module.j2 new file mode 100644 index 0000000..e1c6d45 --- /dev/null +++ b/gouttelette/templates/vmware_rest/default_module.j2 @@ -0,0 +1,328 @@ +{% include 'templates/vmware_rest/header.j2' %} + +# template: default_module.j2 +def build_url(params): + return ( + "https://{vcenter_hostname}" + "{{path}}").format(**params) + + +async def entry_point(module, session): + {% if operations|length > 1 %} + if module.params['state'] == "present": + if "_create" in globals(): + operation = "create" + else: + operation = "update" + elif module.params['state'] == "absent": + operation = "delete" + else: + operation = module.params['state'] + + func = globals()["_" + operation] + {% else %} + func = globals()["_{{operations.keys()|list|first}}"] + {% endif %} + return await func(module.params, session) + +{% for operation in operations|sort -%} +{% set verb = operations[operation][0] %} +{% set _path = operations[operation][1] %} + +{% if operation == "delete" -%} +async def _delete(params, session): + _in_query_parameters = PAYLOAD_FORMAT["delete"]["query"].keys() + payload = prepare_payload(params, PAYLOAD_FORMAT["delete"]) + subdevice_type = get_subdevice_type("{{ _path }}") + if subdevice_type and not params[subdevice_type]: + _json = (await exists(params, session, build_url(params))) + if _json: + params[subdevice_type] = _json['id'] + _url = ( + "https://{vcenter_hostname}" + "{{_path}}").format(**params) + gen_args(params, _in_query_parameters) + async with session.{{ verb }}(_url, json=payload, **session_timeout(params)) as resp: + try: + if resp.headers["Content-Type"] == "application/json": + _json = await resp.json() + except KeyError: + _json = {} + return await update_changed_flag(_json, resp.status, "delete") + + + +{% elif operation in ["create", "clone", "instant_clone"] -%} +async def _{{ operation }}(params, session): + +{% if name.startswith("content_") %} + {% set haz_lookup = 1 %} + lookup_url 
= per_id_url = build_url(params) + uniquity_keys = ["name"] + comp_func = None +{% elif name == "vcenter_vmtemplate_libraryitems" %} + {% set haz_lookup = 1 %} + lookup_url = "https://{vcenter_hostname}/api/content/library/item?library_id={library}".format(**params) + per_id_url = "https://{vcenter_hostname}/api/content/library/item".format(**params) + uniquity_keys = ["name"] + comp_func = None +{% elif name == "vcenter_ovf_libraryitem" %} + {% set haz_lookup = 1 %} + library_id = params['target']['library_id'] if 'library_id' in params["target"] else None + lookup_url = f"https://{params['vcenter_hostname']}/api/content/library/item?library_id={library_id}" + per_id_url = "https://{vcenter_hostname}/api/content/library/item".format(**params) + uniquity_keys = None + def comp_func(device): + return device["value"]["name"] == params["create_spec"].get("name") +{% elif list_index %} + {% set haz_lookup = 1 %} + {% set haz_lookup_with_filters = 1 %} + lookup_url = per_id_url = build_url(params) + uniquity_keys = ["{{list_index}}"] + comp_func = None + + async def lookup_with_filters(params, session, url): + # e.g: for the datacenter resources + if "folder" not in params: + return + if "name" not in params: + return + async with session.get(f"{url}?names={params['name']}&folders={params['folder']}") as resp: + _json = await resp.json() + if isinstance(_json, list) and len(_json) == 1: + return await get_device_info(session, url, _json[0]["{{list_index}}"]) +{% else %} + uniquity_keys = [] +{% endif %} + +{% if haz_lookup is defined %} + _json = None + {% if list_index %} + if params["{{list_index}}"]: + _json = await get_device_info(session, build_url(params), params["{{list_index}}"]) + {% endif %} + if not _json and (uniquity_keys or comp_func): + _json = await exists(params, session, url=lookup_url, uniquity_keys=uniquity_keys, per_id_url=per_id_url, comp_func=comp_func) + {% if haz_lookup_with_filters is defined %} + if not _json: + _json = await lookup_with_filters(params, session, build_url(params)) + {% endif %} + + if _json: + if "value" not in _json: # 7.0.2+ + _json = {"value": _json} + if "_update" in globals(): + params["{{list_index}}"] = _json["id"] + return (await globals()["_update"](params, session)) + + + {% if name == "vcenter_vmtemplate_libraryitems" %} + extra_info_url = "https://{vcenter_hostname}/api/vcenter/vm-template/library-items/{id}".format( + **params, + id=_json["id"] + ) + async with session.get(extra_info_url) as resp: + if resp.status == 200: + extra_json = await resp.json() + for k, v in extra_json.items(): + _json["value"][k] = v + {% endif %} + + return (await update_changed_flag(_json, 200, 'get')) +{% endif %} + + payload = prepare_payload(params, PAYLOAD_FORMAT["{{ operation }}"]) + _url = ( + "https://{vcenter_hostname}" + "{{ _path }}").format(**params) + async with session.{{ verb }}(_url, json=payload, **session_timeout(params)) as resp: + if resp.status == 500: + text = await resp.text() + raise EmbeddedModuleFailure(f"Request has failed: status={resp.status}, {text}") + try: + if resp.headers["Content-Type"] == "application/json": + _json = await resp.json() + except KeyError: + _json = {} + + if (resp.status in [200, 201]) and "error" not in _json: + if isinstance(_json, str): # 7.0.2 and greater + _id = _json # TODO: fetch the object + elif isinstance(_json, dict) and "value" not in _json: + _id = list(_json.values())[0] + elif isinstance(_json, dict) and "value" in _json: + _id = _json["value"] + _json_device_info = await 
get_device_info(session, _url, _id) + if _json_device_info: + _json = _json_device_info + + return await update_changed_flag(_json, resp.status, "{{ operation }}") + + +{% elif operation == "update" -%} +async def _update(params, session): + payload = prepare_payload(params, PAYLOAD_FORMAT["update"]) + _url = ( + "https://{vcenter_hostname}" + "{{ _path }}").format(**params) + async with session.get(_url, **session_timeout(params)) as resp: + _json = await resp.json() + if "value" in _json: + value = _json["value"] + else: # 7.0.2 and greater + value = _json + for k, v in value.items(): + if k in payload: + if isinstance(payload[k], dict) and isinstance(v, dict): + to_delete = True + for _k in list(payload[k].keys()): + if payload[k][_k] != v.get(_k): + to_delete = False + if to_delete: + del payload[k] + elif payload[k] == v: + del payload[k] + elif payload[k] == {}: + del payload[k] + + if payload == {} or payload == {"spec": {}}: + # Nothing has changed + if "value" not in _json: # 7.0.2 + _json = {"value": _json} + _json["id"] = params.get("{{list_index}}") + return await update_changed_flag(_json, resp.status, "get") + async with session.{{ verb }}(_url, json=payload, **session_timeout(params)) as resp: + try: + if resp.headers["Content-Type"] == "application/json": + _json = await resp.json() + except KeyError: + _json = {} + if "value" not in _json: # 7.0.2 + _json = {"value": _json} + + # e.g: content_configuration + if not _json and resp.status == 204: + async with session.get(_url, **session_timeout(params)) as resp_get: + _json_get = await resp_get.json() + if _json_get: + _json = _json_get + + _json["id"] = params.get("{{list_index}}") + return await update_changed_flag(_json, resp.status, "update") + + +{% elif operation == "set" -%} +async def _{{ operation }}(params, session): + _in_query_parameters = PAYLOAD_FORMAT["{{ operation }}"]["query"].keys() + payload = prepare_payload(params, PAYLOAD_FORMAT["{{ operation }}"]) + subdevice_type = get_subdevice_type("{{ _path }}") + if subdevice_type and not params[subdevice_type]: + _json = (await exists(params, session, build_url(params))) + if _json: + params[subdevice_type] = _json['id'] + _url = ( + "https://{vcenter_hostname}" + "{{ _path }}").format(**params) + gen_args(params, _in_query_parameters) + async with session.get(_url, json=payload, **session_timeout(params)) as resp: + before = await resp.json() + + async with session.{{ verb }}(_url, json=payload, **session_timeout(params)) as resp: + try: + if resp.headers["Content-Type"] == "application/json": + _json = await resp.json() + except KeyError: + _json = {} + if "value" not in _json: # 7.0.2 + _json = {"value": _json} + + + {% if _path == "/api/appliance/networking/dns/servers" %} + if ( + resp.status == 500 + and + "messages" in _json["value"] + and + _json["value"]["messages"] + and + "id" in _json["value"]["messages"][0] + and + _json["value"]["messages"][0]["id"] == "com.vmware.applmgmt.err_operation_failed" + and + "args" in _json["value"]["messages"][0] + and + "changing state RUNNING → CLOSED" in _json["value"]["messages"][0]["args"][0] + ): + # vSphere 7.0.2, a network configuration changes of the appliance raise a systemd error, + # but the change is applied. The problem can be resolved by a yum update. 
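As the comment below notes, the PUT answer for the set operation does not say whether the resource was actually modified, so the generated code snapshots the resource with a GET before the PUT and re-reads it afterwards to decide the changed flag. A reduced sketch of that pattern (fetch and push are hypothetical coroutines standing in for the aiohttp calls, not part of this patch):

    # Before/after snapshot pattern used by the generated "set" operation.
    # fetch() re-reads the resource; push() performs the PUT.
    async def idempotent_set(fetch, push, payload):
        before = await fetch()
        status = await push(payload)
        after = await fetch()
        return {"changed": before != after, "failed": status >= 400, "value": after}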
+ async with session.get(_url, json=payload, **session_timeout(params)) as resp: + _json = {"value": await resp.json()} + {% endif %} + + + # The PUT answer does not let us know if the resource has actually been + # modified + if resp.status < 300: + async with session.get(_url, json=payload, **session_timeout(params)) as resp_get: + after = await resp_get.json() + if before == after: + return await update_changed_flag(after, resp_get.status, "get") + return await update_changed_flag(_json, resp.status, "set") + + +{% elif operation not in ["get", "list"] %} + +async def _{{ operation }}(params, session): + _in_query_parameters = PAYLOAD_FORMAT["{{ operation }}"]["query"].keys() + payload = prepare_payload(params, PAYLOAD_FORMAT["{{ operation }}"]) + subdevice_type = get_subdevice_type("{{ _path }}") + if subdevice_type and not params[subdevice_type]: + _json = (await exists(params, session, build_url(params))) + if _json: + params[subdevice_type] = _json['id'] + _url = ( + "https://{vcenter_hostname}" + # aa + "{{ _path }}").format(**params) + gen_args(params, _in_query_parameters) + async with session.{{ verb }}(_url, json=payload, **session_timeout(params)) as resp: + try: + if resp.headers["Content-Type"] == "application/json": + _json = await resp.json() + except KeyError: + _json = {} + if "value" not in _json: # 7.0.2 + _json = {"value": _json} + + + {% if _path == "/api/appliance/networking/dns/servers" %} + if ( + resp.status == 500 + and + "messages" in _json["value"] + and + _json["value"]["messages"] + and + "id" in _json["value"]["messages"][0] + and + _json["value"]["messages"][0]["id"] == "com.vmware.applmgmt.err_operation_failed" + and + "args" in _json["value"]["messages"][0] + and + "changing state RUNNING → CLOSED" in _json["value"]["messages"][0]["args"][0] + ): + # vSphere 7.0.2, a network configuration changes of the appliance raise a systemd error, + # but the change is applied. The problem can be resolved by a yum update. + async with session.get(_url, json=payload, **session_timeout(params)) as resp: + _json = {"value": await resp.json()} + {% endif %} + + + return await update_changed_flag(_json, resp.status, "{{ operation }}") +{% endif %} + +{% endfor %} + + +if __name__ == '__main__': + import asyncio + + current_loop = asyncio.get_event_loop_policy().get_event_loop() + current_loop.run_until_complete(main()) diff --git a/gouttelette/templates/vmware_rest/header.j2 b/gouttelette/templates/vmware_rest/header.j2 new file mode 100644 index 0000000..b2b966d --- /dev/null +++ b/gouttelette/templates/vmware_rest/header.j2 @@ -0,0 +1,110 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# Copyright: (c) 2021, Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +# template: header.j2 +# This module is autogenerated by vmware_rest_code_generator. 
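A note on the template tail just above: the generated modules drive their async main() through the policy's current event loop rather than asyncio.run(), presumably so the loop can be reused across invocations by the long-lived AnsibleTurboModule process. In isolation the pattern looks like this (the main() body is a stand-in):

    import asyncio

    async def main():
        # Stand-in for the generated module's async entry point.
        return {"changed": False}

    if __name__ == "__main__":
        # Reuse the current loop instead of creating and closing a fresh
        # one per call, as asyncio.run() would.
        loop = asyncio.get_event_loop_policy().get_event_loop()
        loop.run_until_complete(main())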
+# See: https://github.com/ansible-collections/vmware_rest_code_generator +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +DOCUMENTATION = {{documentation}} + +EXAMPLES = r""" +""" + +RETURN = r""" +""" + +# This structure describes the format of the data expected by the end-points +PAYLOAD_FORMAT = {{payload_format}} # pylint: disable=line-too-long + +import json +import socket +from ansible.module_utils.basic import env_fallback +try: + from ansible_collections.cloud.common.plugins.module_utils.turbo.exceptions import EmbeddedModuleFailure + from ansible_collections.cloud.common.plugins.module_utils.turbo.module import AnsibleTurboModule as AnsibleModule + AnsibleModule.collection_name = "vmware.vmware_rest" +except ImportError: + from ansible.module_utils.basic import AnsibleModule +from ansible_collections.vmware.vmware_rest.plugins.module_utils.vmware_rest import ( + build_full_device_list, + exists, + gen_args, + get_device_info, + get_subdevice_type, + list_devices, + open_session, + prepare_payload, + update_changed_flag, + session_timeout, + ) + + + +def prepare_argument_spec(): + argument_spec = { + "vcenter_hostname": dict( + type='str', + required=True, + fallback=(env_fallback, ['VMWARE_HOST']), + ), + "vcenter_username": dict( + type='str', + required=True, + fallback=(env_fallback, ['VMWARE_USER']), + ), + "vcenter_password": dict( + type='str', + required=True, + no_log=True, + fallback=(env_fallback, ['VMWARE_PASSWORD']), + ), + "vcenter_validate_certs": dict( + type='bool', + required=False, + default=True, + fallback=(env_fallback, ['VMWARE_VALIDATE_CERTS']), + ), + "vcenter_rest_log_file": dict( + type='str', + required=False, + fallback=(env_fallback, ['VMWARE_REST_LOG_FILE']), + ), + "session_timeout": dict( + type='float', + required=False, + fallback=(env_fallback, ["VMWARE_SESSION_TIMEOUT"]), + ), + } + + {{arguments}} + return argument_spec + + +async def main(): + required_if = list([ + {{ required_if|join(",") }} + ]) + + module_args = prepare_argument_spec() + module = AnsibleModule(argument_spec=module_args, required_if=required_if, supports_check_mode=True) + if not module.params['vcenter_hostname']: + module.fail_json('vcenter_hostname cannot be empty') + if not module.params['vcenter_username']: + module.fail_json('vcenter_username cannot be empty') + if not module.params['vcenter_password']: + module.fail_json('vcenter_password cannot be empty') + try: + session = await open_session( + vcenter_hostname=module.params['vcenter_hostname'], + vcenter_username=module.params['vcenter_username'], + vcenter_password=module.params['vcenter_password'], + validate_certs=module.params['vcenter_validate_certs'], + log_file=module.params['vcenter_rest_log_file'],) + except EmbeddedModuleFailure as err: + module.fail_json(err.get_message()) + result = await entry_point(module, session) + module.exit_json(**result) diff --git a/gouttelette/templates/vmware_rest/info_list_and_get_module.j2 b/gouttelette/templates/vmware_rest/info_list_and_get_module.j2 new file mode 100644 index 0000000..4068e36 --- /dev/null +++ b/gouttelette/templates/vmware_rest/info_list_and_get_module.j2 @@ -0,0 +1,46 @@ +{% include 'header.j2' %} + +# template: info_list_and_get_module.j2 +def build_url(params): + import yarl + {% if list_index -%} + if params.get('{{list_index}}'): + _in_query_parameters = PAYLOAD_FORMAT["get"]["query"].keys() + return yarl.URL(( + "https://{vcenter_hostname}" + "{{path}}/").format(**params) + params['{{list_index}}'] + 
gen_args(params, _in_query_parameters), encoded=True) + {% endif -%} + _in_query_parameters = PAYLOAD_FORMAT["list"]["query"].keys() + return yarl.URL(( + "https://{vcenter_hostname}" + "{{list_path}}").format(**params) + gen_args(params, _in_query_parameters), encoded=True) + + +async def entry_point(module, session): + url = build_url(module.params) + async with session.get(url, **session_timeout(module.params)) as resp: + _json = await resp.json() + + if "value" not in _json: # 7.0.2+ + _json = {"value": _json} + + {% if list_index %} + if module.params.get('{{list_index}}'): + _json["id"] = module.params.get('{{list_index}}') + elif module.params.get("label"): # TODO extend the list of filter + _json = await exists(module.params, session, str(url)) + elif (isinstance(_json["value"], list) and len(_json["value"]) > 0 and + isinstance(_json["value"][0], str)): + # this is a list of id, we fetch the details + full_device_list = await build_full_device_list(session, str(url), _json) + _json = {"value": [i["value"] for i in full_device_list]} + {% endif %} + + return await update_changed_flag(_json, resp.status, "get") + + +if __name__ == "__main__": + import asyncio + + current_loop = asyncio.get_event_loop_policy().get_event_loop() + current_loop.run_until_complete(main()) diff --git a/gouttelette/templates/vmware_rest/info_no_list_module.j2 b/gouttelette/templates/vmware_rest/info_no_list_module.j2 new file mode 100644 index 0000000..d2a54d3 --- /dev/null +++ b/gouttelette/templates/vmware_rest/info_no_list_module.j2 @@ -0,0 +1,33 @@ +{% include 'header.j2' %} + +# template: info_no_list_module.j2 +def build_url(params): + return ( + "https://{vcenter_hostname}" + "{{path}}").format(**params) + + +async def entry_point(module, session): + return await _info(module.params, session) + + +async def _info(params, session): + payload_format = list(PAYLOAD_FORMAT.values())[0] + _in_query_parameters = payload_format["query"].keys() + _url = ( + "https://{vcenter_hostname}" + "{{path}}").format(**params) + gen_args(params, _in_query_parameters) + async with session.get(_url, **session_timeout(params)) as resp: + try: + if resp.headers["Content-Type"] == "application/json": + _json = await resp.json() + except KeyError: + _json = {} + return await update_changed_flag(_json, resp.status, "get") + + +if __name__ == "__main__": + import asyncio + + current_loop = asyncio.get_event_loop_policy().get_event_loop() + current_loop.run_until_complete(main()) diff --git a/gouttelette/utils.py b/gouttelette/utils.py index 3990363..2ffb87f 100644 --- a/gouttelette/utils.py +++ b/gouttelette/utils.py @@ -4,20 +4,41 @@ from dataclasses import dataclass from typing import Any, Dict, Iterable, List, Optional, TypedDict, Union import jinja2 -import pkg_resources +import baron +import redbaron import yaml +import re +import copy +import subprocess from pathlib import Path +from functools import lru_cache -def jinja2_renderer( - template_file: str, generator: str, **kwargs: Dict[str, Any] -) -> str: - templateLoader = jinja2.PackageLoader(generator) +def jinja2_renderer(template_file: str, **kwargs: Dict[str, Any]) -> str: + + template_path = re.sub("(.*)_code_generator", r"\1", get_generator()["name"]) + templateLoader = jinja2.FileSystemLoader("gouttelette") templateEnv = jinja2.Environment(loader=templateLoader) - template = templateEnv.get_template(template_file) + template = templateEnv.get_template( + "templates/" + template_path + "/" + template_file + ) return template.render(kwargs) +def get_generator() -> 
Dict[str, Any]: + generator = {} + with open("gouttelette.yml", "r") as file: + try: + generator.update({"name": yaml.safe_load(file)["generator"]}) + if "amazon_cloud_code_generator" in generator["name"]: + generator.update({"default_path": "cloud"}) + elif "vmware_rest_code_generator" in generator["name"]: + generator.update({"default_path": "vmware_rest"}) + except yaml.YAMLError as exc: + print(exc) + return generator + + def format_documentation(documentation: Any) -> str: yaml.Dumper.ignore_aliases = lambda *args: True # type: ignore @@ -67,9 +88,11 @@ def indent(text_block: str, indent: int = 0) -> str: return result -def get_module_from_config(module: str, generator: str) -> dict[str, Any]: +def get_module_from_config(module: str, target_dir: Path) -> dict[str, Any]: + + module_file = target_dir / "modules.yaml" + raw_content = module_file.read_text() - raw_content = pkg_resources.resource_string(generator, "config/modules.yaml") for i in yaml.safe_load(raw_content): if module in i: return i[module] or {} @@ -90,13 +113,166 @@ def python_type(value: str) -> str: return TYPE_MAPPING.get(value, value) +def run_git(git_dir: str, *args: List[Any]) -> List[Any]: + cmd = [ + "git", + "--git-dir", + git_dir, + ] + for arg in args: + cmd.append(arg) + r = subprocess.run(cmd, text=True, capture_output=True) + return r.stdout.rstrip().split("\n") + + +@lru_cache(maxsize=None) +def file_by_tag(git_dir: str) -> Dict[str, Any]: + tags = run_git(git_dir, "tag") + + files_by_tag: Dict[str, Any] = {} + for tag in tags: + files_by_tag[tag] = run_git(git_dir, "ls-tree", "-r", "--name-only", tag) + + return files_by_tag + + +def get_module_added_ins(module_name: str, git_dir: str) -> Dict[str, Any]: + added_ins: Dict[str, Any] = {"module": None, "options": {}} + module = f"plugins/modules/{module_name}.py" + + for tag, files in file_by_tag(git_dir).items(): + if "rc" in tag: + continue + if module in files: + if not added_ins["module"]: + added_ins["module"] = tag + content = "\n".join( + run_git( + git_dir, + "cat-file", + "--textconv", + f"{tag}:{module}", + ) + ) + try: + ast_file = redbaron.RedBaron(content) + except baron.BaronError as e: + print(f"Failed to parse {tag}:plugins/modules/{module_name}.py. {e}") + continue + doc_block = ast_file.find( + "assignment", target=lambda x: x.dumps() == "DOCUMENTATION" + ) + if not doc_block or not doc_block.value: + print(f"Cannot find DOCUMENTATION block for module {module_name}") + doc_content = yaml.safe_load(doc_block.value.to_python()) + for option in doc_content["options"]: + if option not in added_ins["options"]: + added_ins["options"][option] = tag + + return added_ins + + +def scrub_keys( + a_dict: Dict[str, Any], list_of_keys_to_remove: List[str] +) -> Dict[str, Any]: + """Filter a_dict by removing unwanted keys: values listed in list_of_keys_to_remove""" + if not isinstance(a_dict, dict): + return a_dict + return { + k: v + for k, v in ( + (k, scrub_keys(v, list_of_keys_to_remove)) for k, v in a_dict.items() + ) + if k not in list_of_keys_to_remove + } + + +def ignore_description(a_dict: Dict[str, Any]) -> Dict[str, Any]: + """ + Filter a_dict by removing description fields. + Handle when 'description' is a module suboption. 
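scrub_keys() above strips the JSON-Schema validation keywords (maxLength, pattern, and so on) that the Ansible argument spec cannot express, recursing through nested dicts (lists are returned untouched). A quick usage sketch, assuming gouttelette.utils is importable:

    # scrub_keys() drops the listed keys at any nesting depth and keeps
    # the rest of the schema fragment intact.
    from gouttelette.utils import scrub_keys

    schema = {"properties": {"name": {"type": "string", "maxLength": 64}}}
    assert scrub_keys(schema, ["maxLength", "minLength"]) == {
        "properties": {"name": {"type": "string"}}
    }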
+ """ + a_dict_copy = copy.copy(a_dict) + if not isinstance(a_dict, dict): + return a_dict + + for k, v in a_dict_copy.items(): + if k == "description": + if isinstance(v, dict): + ignore_description(v) + else: + a_dict.pop(k) + ignore_description(v) + + +def ensure_description( + element: Dict[str, Any], *keys: List[Any], default: str = "Not Provived." +) -> Dict[str, Any]: + """ + Check if *keys (nested) exists in `element` (dict) and ensure it has the default value. + """ + if isinstance(element, dict): + for key, value in element.items(): + if key == "suboptions": + ensure_description(value, *keys) + + if isinstance(value, dict): + for akey in keys: + if akey not in value: + element[key][akey] = [default] + for k, v in value.items(): + ensure_description(v, *keys) + + return element + + +def _camel_to_snake(name: str, reversible: bool = False) -> str: + def prepend_underscore_and_lower(m: str) -> str: + return "_" + m.group(0).lower() + + if reversible: + upper_pattern = r"[A-Z]" + else: + # Cope with pluralized abbreviations such as TargetGroupARNs + # that would otherwise be rendered target_group_ar_ns + upper_pattern = r"[A-Z]{3,}s$" + + s1 = re.sub(upper_pattern, prepend_underscore_and_lower, name) + # Handle when there was nothing before the plural_pattern + if s1.startswith("_") and not name.startswith("_"): + s1 = s1[1:] + if reversible: + return s1 + + # Remainder of solution seems to be https://stackoverflow.com/a/1176023 + first_cap_pattern = r"(.)([A-Z][a-z]+)" + all_cap_pattern = r"([a-z0-9])([A-Z]+)" + s2 = re.sub(first_cap_pattern, r"\1_\2", s1) + return re.sub(all_cap_pattern, r"\1_\2", s2).lower() + + +def camel_to_snake(data: Any) -> Any: + if isinstance(data, str): + return _camel_to_snake(data) + elif isinstance(data, list): + return [_camel_to_snake(r) for r in data] + elif isinstance(data, dict): + b_dict: Dict[str, Any] = {} + for k in data.keys(): + if isinstance(data[k], dict): + b_dict[_camel_to_snake(k)] = camel_to_snake(data[k]) + else: + b_dict[_camel_to_snake(k)] = data[k] + return b_dict + + @dataclass class UtilsBase: name: str - def is_trusted(self, generator: str) -> bool: + def is_trusted(self, target_dir: Path) -> bool: try: - get_module_from_config(self.name, generator) + get_module_from_config(self.name, target_dir) return True except KeyError: print(f"- do not build: {self.name}") diff --git a/setup.cfg b/setup.cfg index 9eaba4a..ae8ef66 100644 --- a/setup.cfg +++ b/setup.cfg @@ -18,6 +18,10 @@ install_requires = ruamel.yaml jinja2 PyYAML + baron + redbaron + boto3 + pbr [options.package_data] * = *.txt, *.rst @@ -27,6 +31,8 @@ hello = *.msg [options.entry_points] console_scripts = gouttelette-refresh-examples = gouttelette.cmd.refresh_examples:main + gouttelette-refresh-schema = gouttelette.cmd.refresh_schema:main + gouttelette-refresh-modules = gouttelette.cmd.refresh_modules:main [options.packages.find] exclude = diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/cmd/fixtures/__init__.py b/tests/unit/cmd/fixtures/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/cmd/fixtures/expected_content.json b/tests/unit/cmd/fixtures/expected_content.json new file mode 100644 index 0000000..f370f14 --- /dev/null +++ b/tests/unit/cmd/fixtures/expected_content.json @@ -0,0 +1,112 @@ +{ + "module": "logs_log_group", + "author": "Ansible Cloud Team (@ansible-collections)", + "description": [ + "Create and manage log groups." 
+ ], + "short_description": "Create and manage log groups", + "options": { + "log_group_name": { + "description": [ + "The name of the log group.", + "If you dont specify a name, AWS CloudFormation generates a unique ID for the log group." + ], + "type": "str" + }, + "purge_tags": { + "default": true, + "description": [ + "Remove tags not listed in I(tags)." + ], + "type": "bool" + }, + "kms_key_id": { + "description": [ + "The Amazon Resource Name (ARN) of the CMK to use when encrypting log data." + ], + "type": "str" + }, + "retention_in_days": { + "description": [ + "The number of days to retain the log events in the specified log group.", + "Possible values are: C(1), C(3), C(5), C(7), C(14), C(30), C(60), C(90), C(120), C(150), C(180), C(365), C(400), C(545), C(731), C(1827), and C(3653)." + ], + "type": "int", + "choices": [ + 1, + 3, + 5, + 7, + 14, + 30, + 60, + 90, + 120, + 150, + 180, + 365, + 400, + 545, + 731, + 1827, + 3653 + ] + }, + "state": { + "description": [ + "Goal state for resource.", + "I(state=present) creates the resource if it doesn't exist, or updates to the provided state if the resource already exists.", + "I(state=absent) ensures an existing instance is deleted.", + "I(state=list) get all the existing resources.", + "I(state=describe) or I(state=get) retrieves information on an existing resource." + ], + "type": "str", + "choices": [ + "present", + "absent", + "list", + "describe", + "get" + ], + "default": "present" + }, + "tags": { + "aliases": [ + "resource_tags" + ], + "description": [ + "A dict of tags to apply to the resource.", + "To remove all tags set I(tags={}) and I(purge_tags=true)." + ], + "type": "dict" + }, + "wait": { + "description": [ + "Wait for operation to complete before returning." + ], + "type": "bool", + "default": false + }, + "wait_timeout": { + "description": [ + "How many seconds to wait for an operation to complete before timing out." + ], + "type": "int", + "default": 320 + }, + "force": { + "description": [ + "Cancel IN_PROGRESS and PENDING resource requestes.", + "Because you can only perform a single operation on a given resource at a time, there might be cases where you need to cancel the current resource operation to make the resource available so that another operation may be performed on it." + ], + "type": "bool", + "default": false + } + }, + "version_added": "1.0.0", + "extends_documentation_fragment": [ + "amazon.aws.aws", + "amazon.aws.ec2", + "amazon.cloud.boto3" + ] +} diff --git a/tests/unit/cmd/fixtures/modules.yaml b/tests/unit/cmd/fixtures/modules.yaml new file mode 100644 index 0000000..742ce26 --- /dev/null +++ b/tests/unit/cmd/fixtures/modules.yaml @@ -0,0 +1,31 @@ +--- +- s3_bucket: + documentation: + short_description: Create and manage S3 buckets + description: + - Create and manage S3 buckets. +- logs_log_group: + documentation: + short_description: Create and manage log groups + description: + - Create and manage log groups. +- iam_role: + documentation: + short_description: Create and manage roles + description: + - Creates and manages new roles for your AWS account. +- backup_backup_vault: + documentation: + short_description: Create and manage logical containers where backups are stored + description: + - Creates and manages logical containers where backups are stored. +- backup_framework: + documentation: + short_description: Create and manage frameworks with one or more controls + description: + - Creates and manages frameworks with one or more controls. 
+- backup_report_plan: + documentation: + short_description: Create and manage report plans + description: + - Creates and manages report plans. diff --git a/tests/unit/utils/fixtures/raw_content.json b/tests/unit/cmd/fixtures/raw_content.json similarity index 100% rename from tests/unit/utils/fixtures/raw_content.json rename to tests/unit/cmd/fixtures/raw_content.json diff --git a/tests/unit/cmd/test_generator.py b/tests/unit/cmd/test_generator.py new file mode 100644 index 0000000..8170fe8 --- /dev/null +++ b/tests/unit/cmd/test_generator.py @@ -0,0 +1,68 @@ +# (c) 2022 Red Hat Inc. +# +# This file is part of Ansible +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + + +import os +import pytest +import json +from pathlib import Path + +import gouttelette.cmd.generator as g +import gouttelette.cmd.refresh_modules as rm +import gouttelette.cmd.refresh_schema as rs + + +def resources(filepath): + current = Path(os.path.dirname(os.path.abspath(__file__))) + with open(current / filepath) as fp: + return json.load(fp) + + +raw_content = resources("fixtures/raw_content.json") +expected_content = resources("fixtures/expected_content.json") + + +def test_Description_normalize(): + assert g.Description.normalize("a") == ["a."] + assert g.Description.normalize("") == [] + assert g.Description.normalize("CloudWatch") == ["CloudWatch."] + assert g.Description.normalize( + "The Amazon Resource Name (ARN) of the Amazon SQS queue to which Amazon S3 publishes a message." + ) == [ + "The Amazon Resource Name (ARN) of the Amazon SQS queue to which Amazon S3 publishes a message." + ] + assert g.Description.normalize( + "Setting this element to TRUE causes Amazon S3 to reject calls to PUT Bucket policy." + ) == [ + "Setting this element to C(True) causes Amazon S3 to reject calls to PUT Bucket policy." + ] + assert g.Description.normalize( + "Setting this element to TRUE causes Amazon S3 to reject calls to PUT Bucket policy" + ) == [ + "Setting this element to C(True) causes Amazon S3 to reject calls to PUT Bucket policy." + ] + assert g.Description.normalize( + "Possible values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1827, and 3653." + ) == [ + "Possible values are: C(1), C(3), C(5), C(7), C(14), C(30), C(60), C(90), C(120), C(150), C(180), C(365), C(400), C(545), C(731), C(1827), and C(3653)." + ] + assert g.Description.normalize( + "Container for the transition rule that describes when noncurrent objects transition to the STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING, GLACIER_IR, C(GLACIER), or DEEP_ARCHIVE storage class." + ) == [ + "Container for the transition rule that describes when noncurrent objects transition to the C(STANDARD_IA), C(ONEZONE_IA), C(INTELLIGENT_TIERING), C(GLACIER_IR), C(GLACIER), or C(DEEP_ARCHIVE) storage class." + ] + + +def test_generate_documentation(): + schema = rs.generate_schema(json.dumps(raw_content)) + module = rm.AnsibleModuleBaseAmazon(schema=schema) + added_ins = {"module": "1.0.0"} + documentation = g.generate_documentation( + module, + added_ins, + "1.0.0", + Path("tests/unit/cmd/fixtures"), + ) + assert documentation == expected_content diff --git a/tests/unit/cmd/test_refresh_modules.py b/tests/unit/cmd/test_refresh_modules.py new file mode 100644 index 0000000..5f5d87a --- /dev/null +++ b/tests/unit/cmd/test_refresh_modules.py @@ -0,0 +1,199 @@ +# (c) 2022 Red Hat Inc. 
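The generator tests above exercise utils.camel_to_snake indirectly; the helper special-cases pluralized acronyms, per the TargetGroupARNs comment in utils.py earlier in this patch. A few illustrative calls, assuming gouttelette.utils is importable:

    # The non-reversible mode collapses a trailing pluralized acronym so
    # TargetGroupARNs does not come out as target_group_ar_ns.
    from gouttelette.utils import camel_to_snake

    assert camel_to_snake("TargetGroupARNs") == "target_group_arns"
    assert camel_to_snake("LogGroupName") == "log_group_name"
    assert camel_to_snake({"KmsKeyId": "abc"}) == {"kms_key_id": "abc"}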
+# +# This file is part of Ansible +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + + +import os +import pytest +import json +from pathlib import Path + +import gouttelette.cmd.refresh_modules as rm +import gouttelette.cmd.refresh_schema as rs +import gouttelette.cmd.generator as g + + +def resources(filepath): + current = Path(os.path.dirname(os.path.abspath(__file__))) + with open(current / filepath) as fp: + return json.load(fp) + + +raw_content = resources("fixtures/raw_content.json") +expected_content = resources("fixtures/expected_content.json") + + +def test__gen_required_if(): + expected_required_if = [ + ["state", "present", ["log_group_name"], True], + ["state", "absent", ["log_group_name"], True], + ["state", "get", ["log_group_name"], True], + ] + schema = rs.generate_schema(json.dumps(raw_content)) + assert rm.gen_required_if(schema) == expected_required_if + + +def test__generate_params(): + expected_params = """ +params['kms_key_id'] = module.params.get('kms_key_id') +params['log_group_name'] = module.params.get('log_group_name') +params['retention_in_days'] = module.params.get('retention_in_days') +params['tags'] = module.params.get('tags')""" + schema = rs.generate_schema(json.dumps(raw_content)) + module = rm.AnsibleModuleBaseAmazon(schema=schema) + added_ins = {"module": "1.0.0"} + documentation = g.generate_documentation( + module, + added_ins, + "", + Path("tests/unit/cmd/fixtures"), + ) + assert rm.generate_params(documentation["options"]) == expected_params + + +def test__format_documentation(): + expected = """r''' +module: logs_log_group +short_description: Create and manage log groups +description: +- Create and manage log groups. +options: + force: + default: false + description: + - Cancel IN_PROGRESS and PENDING resource requestes. + - Because you can only perform a single operation on a given resource at a + time, there might be cases where you need to cancel the current resource + operation to make the resource available so that another operation may + be performed on it. + type: bool + kms_key_id: + description: + - The Amazon Resource Name (ARN) of the CMK to use when encrypting log data. + type: str + log_group_name: + description: + - The name of the log group. + - If you dont specify a name, AWS CloudFormation generates a unique ID for + the log group. + type: str + purge_tags: + default: true + description: + - Remove tags not listed in I(tags). + type: bool + retention_in_days: + choices: + - 1 + - 3 + - 5 + - 7 + - 14 + - 30 + - 60 + - 90 + - 120 + - 150 + - 180 + - 365 + - 400 + - 545 + - 731 + - 1827 + - 3653 + description: + - The number of days to retain the log events in the specified log group. + - 'Possible values are: C(1), C(3), C(5), C(7), C(14), C(30), C(60), C(90), + C(120), C(150), C(180), C(365), C(400), C(545), C(731), C(1827), and C(3653).' + type: int + state: + choices: + - present + - absent + - list + - describe + - get + default: present + description: + - Goal state for resource. + - I(state=present) creates the resource if it doesn't exist, or updates to + the provided state if the resource already exists. + - I(state=absent) ensures an existing instance is deleted. + - I(state=list) get all the existing resources. + - I(state=describe) or I(state=get) retrieves information on an existing resource. + type: str + tags: + aliases: + - resource_tags + description: + - A dict of tags to apply to the resource. + - To remove all tags set I(tags={}) and I(purge_tags=true). 
+ type: dict + wait: + default: false + description: + - Wait for operation to complete before returning. + type: bool + wait_timeout: + default: 320 + description: + - How many seconds to wait for an operation to complete before timing out. + type: int +author: Ansible Cloud Team (@ansible-collections) +version_added: 1.0.0 +extends_documentation_fragment: +- amazon.aws.aws +- amazon.aws.ec2 +- amazon.cloud.boto3 +'''""" + + schema = rs.generate_schema(json.dumps(raw_content)) + module = rm.AnsibleModuleBaseAmazon(schema=schema) + added_ins = {"module": "1.0.0"} + documentation = g.generate_documentation( + module, + added_ins, + "1.0.0", + Path("tests/unit/cmd/fixtures"), + ) + + assert rm.format_documentation(documentation) == expected + + +def test__generate_argument_spec(): + expected_argument_spec = """ +argument_spec['log_group_name'] = {'type': 'str'} +argument_spec['kms_key_id'] = {'type': 'str'} +argument_spec['retention_in_days'] = {'type': 'int', 'choices': [1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1827, 3653]} +argument_spec['tags'] = {'type': 'dict', 'aliases': ['resource_tags']} +argument_spec['state'] = {'type': 'str', 'choices': ['present', 'absent', 'list', 'describe', 'get'], 'default': 'present'} +argument_spec['wait'] = {'type': 'bool', 'default': False} +argument_spec['wait_timeout'] = {'type': 'int', 'default': 320} +argument_spec['force'] = {'type': 'bool', 'default': False} +argument_spec['purge_tags'] = {'type': 'bool', 'default': True}""" + schema = rs.generate_schema(json.dumps(raw_content)) + module = rm.AnsibleModuleBaseAmazon(schema=schema) + added_ins = {"module": "1.0.0"} + documentation = g.generate_documentation( + module, + added_ins, + "", + Path("tests/unit/cmd/fixtures"), + ) + + assert rm.generate_argument_spec(documentation["options"]) == expected_argument_spec + + +def test_AnsibleModuleBaseAmazon(): + schema = rs.generate_schema(json.dumps(raw_content)) + module = rm.AnsibleModuleBaseAmazon(schema=schema) + assert module.name == "logs_log_group" + + +def test_AnsibleModuleBaseAmazon_is_trusted(): + schema = rs.generate_schema(json.dumps(raw_content)) + module = rm.AnsibleModuleBaseAmazon(schema=schema) + assert module.is_trusted(Path("tests/unit/cmd/fixtures")) + module.name = "something_we_dont_trust" + assert not module.is_trusted(Path("tests/unit/cmd/fixtures")) diff --git a/tests/unit/utils/test_utils.py b/tests/unit/utils/test_utils.py index f8c036b..023280c 100644 --- a/tests/unit/utils/test_utils.py +++ b/tests/unit/utils/test_utils.py @@ -50,14 +50,3 @@ def test_python_type(): assert utils.python_type("boolean") == "bool" assert utils.python_type(["object", "string"]) == "dict" assert utils.python_type(["string", "object"]) == "str" - - -@patch("pkg_resources.resource_string") -def test_UtilsBase_is_trusted(m_resource_string): - m_resource_string.return_value = "---\n- foo:\n- bar:\n some: key\n" - module = utils.UtilsBase("no-trusted-pantoute") - assert module.is_trusted("a-generator") is False - module = utils.UtilsBase("foo") - assert module.is_trusted("a-generator") is True - module = utils.UtilsBase("bar") - assert module.is_trusted("a-generator") is True
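Closing note: test_UtilsBase_is_trusted is deleted above because is_trusted() no longer goes through pkg_resources; it now reads modules.yaml from the --target-dir path. A possible replacement test, sketched here rather than added by the patch (it assumes get_module_from_config() still raises KeyError for unknown modules, as the except clause in is_trusted() implies):

    # Hypothetical pytest replacement for the removed test, exercising the
    # new Path-based lookup against a throwaway modules.yaml.
    from pathlib import Path
    from gouttelette import utils

    def test_is_trusted_from_target_dir(tmp_path: Path) -> None:
        (tmp_path / "modules.yaml").write_text("---\n- foo:\n- bar:\n    some: key\n")
        assert utils.UtilsBase("foo").is_trusted(tmp_path)
        assert utils.UtilsBase("bar").is_trusted(tmp_path)
        assert not utils.UtilsBase("no-trusted-pantoute").is_trusted(tmp_path)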